1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not, see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
44 #include "typeclass.h"
47 #include "langhooks.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
56 #include "diagnostic.h"
58 /* Decide whether a function's arguments should be processed
59 from first to last or from last to first.
61 They should if the stack and args grow in opposite directions, but
62 only if we have push insns. */
66 #ifndef PUSH_ARGS_REVERSED
67 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
68 #define PUSH_ARGS_REVERSED /* If it's last to first. */
74 #ifndef STACK_PUSH_CODE
75 #ifdef STACK_GROWS_DOWNWARD
76 #define STACK_PUSH_CODE PRE_DEC
78 #define STACK_PUSH_CODE PRE_INC
83 /* If this is nonzero, we do not bother generating VOLATILE
84 around volatile memory references, and we are willing to
85 output indirect addresses. If cse is to follow, we reject
86 indirect addresses so a useful potential cse is generated;
87 if it is used only once, instruction combination will produce
88 the same indirect address eventually. */
91 /* This structure is used by move_by_pieces to describe the move to be performed. */
102 int explicit_inc_from;
103 unsigned HOST_WIDE_INT len;
104 HOST_WIDE_INT offset;
108 /* This structure is used by store_by_pieces to describe the clear to be performed. */
111 struct store_by_pieces
117 unsigned HOST_WIDE_INT len;
118 HOST_WIDE_INT offset;
119 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
124 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
127 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
128 struct move_by_pieces *);
129 static bool block_move_libcall_safe_for_call_parm (void);
130 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
131 static tree emit_block_move_libcall_fn (int);
132 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
133 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
134 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
135 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
136 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
137 struct store_by_pieces *);
138 static tree clear_storage_libcall_fn (int);
139 static rtx compress_float_constant (rtx, rtx);
140 static rtx get_subtarget (rtx);
141 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
142 HOST_WIDE_INT, enum machine_mode,
143 tree, tree, int, alias_set_type);
144 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
145 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
146 tree, tree, alias_set_type, bool);
148 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150 static int is_aligning_offset (const_tree, const_tree);
151 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
152 enum expand_modifier);
153 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
154 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
156 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
159 static rtx const_vector_from_tree (tree);
160 static void write_complex_part (rtx, rtx, bool);
162 /* Record for each mode whether we can move a register directly to or
163 from an object of that mode in memory. If we can't, we won't try
164 to use that mode directly when accessing a field of that mode. */
166 static char direct_load[NUM_MACHINE_MODES];
167 static char direct_store[NUM_MACHINE_MODES];
169 /* Record for each mode whether we can float-extend from memory. */
171 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173 /* This macro is used to determine whether move_by_pieces should be called
174 to perform a structure copy. */
175 #ifndef MOVE_BY_PIECES_P
176 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
177 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
178 < (unsigned int) MOVE_RATIO)
181 /* This macro is used to determine whether clear_by_pieces should be
182 called to clear storage. */
183 #ifndef CLEAR_BY_PIECES_P
184 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
185 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
186 < (unsigned int) CLEAR_RATIO)
189 /* This macro is used to determine whether store_by_pieces should be
190 called to "memset" storage with byte values other than zero. */
191 #ifndef SET_BY_PIECES_P
192 #define SET_BY_PIECES_P(SIZE, ALIGN) \
193 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
194 < (unsigned int) SET_RATIO)
197 /* This macro is used to determine whether store_by_pieces should be
198 called to "memcpy" storage when the source is a constant string. */
199 #ifndef STORE_BY_PIECES_P
200 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
201 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
202 < (unsigned int) MOVE_RATIO)
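/* A minimal illustration (not a real caller) of how the cutoff macros
   above are consulted; real uses appear later in this file.  The
   helper name is hypothetical.  Inline expansion wins only when the
   estimated insn count beats the target's ratio.  */
static bool ATTRIBUTE_UNUSED
by_pieces_profitable_example_p (unsigned HOST_WIDE_INT size,
				unsigned int align)
{
  return MOVE_BY_PIECES_P (size, align);
}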
205 /* This array records the insn_code of insns to perform block moves. */
206 enum insn_code movmem_optab[NUM_MACHINE_MODES];
208 /* This array records the insn_code of insns to perform block sets. */
209 enum insn_code setmem_optab[NUM_MACHINE_MODES];
211 /* These arrays record the insn_code of three different kinds of insns
212 to perform block compares. */
213 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
214 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
215 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
217 /* Synchronization primitives. */
218 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
219 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
231 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
232 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
233 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
234 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
235 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
236 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
237 enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
238 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
239 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
241 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
243 #ifndef SLOW_UNALIGNED_ACCESS
244 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
247 /* This is run to set up which modes can be used
248 directly in memory and to initialize the block move optab. It is run
249 at the beginning of compilation and when the target is reinitialized. */
252 init_expr_target (void)
255 enum machine_mode mode;
260 /* Try indexing by frame ptr and try by stack ptr.
261 It is known that on the Convex the stack ptr isn't a valid index.
262 With luck, one or the other is valid on any machine. */
263 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
264 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
266 /* A scratch register we can modify in-place below to avoid
267 useless RTL allocations. */
268 reg = gen_rtx_REG (VOIDmode, -1);
270 insn = rtx_alloc (INSN);
271 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
272 PATTERN (insn) = pat;
274 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
275 mode = (enum machine_mode) ((int) mode + 1))
279 direct_load[(int) mode] = direct_store[(int) mode] = 0;
280 PUT_MODE (mem, mode);
281 PUT_MODE (mem1, mode);
282 PUT_MODE (reg, mode);
284 /* See if there is some register that can be used in this mode and
285 directly loaded or stored from memory. */
287 if (mode != VOIDmode && mode != BLKmode)
288 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
289 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
292 if (! HARD_REGNO_MODE_OK (regno, mode))
295 SET_REGNO (reg, regno);
298 SET_DEST (pat) = reg;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_load[(int) mode] = 1;
302 SET_SRC (pat) = mem1;
303 SET_DEST (pat) = reg;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_load[(int) mode] = 1;
308 SET_DEST (pat) = mem;
309 if (recog (pat, insn, &num_clobbers) >= 0)
310 direct_store[(int) mode] = 1;
313 SET_DEST (pat) = mem1;
314 if (recog (pat, insn, &num_clobbers) >= 0)
315 direct_store[(int) mode] = 1;
319 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
321 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
322 mode = GET_MODE_WIDER_MODE (mode))
324 enum machine_mode srcmode;
325 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
326 srcmode = GET_MODE_WIDER_MODE (srcmode))
330 ic = can_extend_p (mode, srcmode, 0);
331 if (ic == CODE_FOR_nothing)
334 PUT_MODE (mem, srcmode);
336 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
337 float_extend_from_mem[mode][srcmode] = true;
342 /* This is run at the start of compiling a function. */
347 memset (&crtl->expr, 0, sizeof (crtl->expr));
350 /* Copy data from FROM to TO, where the machine modes are not the same.
351 Both modes may be integer, or both may be floating, or both may be fixed-point.
353 UNSIGNEDP should be nonzero if FROM is an unsigned type.
354 This causes zero-extension instead of sign-extension. */
357 convert_move (rtx to, rtx from, int unsignedp)
359 enum machine_mode to_mode = GET_MODE (to);
360 enum machine_mode from_mode = GET_MODE (from);
361 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
362 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
366 /* rtx code for making an equivalent value. */
367 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
368 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
371 gcc_assert (to_real == from_real);
372 gcc_assert (to_mode != BLKmode);
373 gcc_assert (from_mode != BLKmode);
375 /* If the source and destination are already the same, then there's nothing to do. */
380 /* If FROM is a SUBREG that indicates that we have already done at least
381 the required extension, strip it. We don't handle such SUBREGs as TO here. */
384 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
385 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
386 >= GET_MODE_SIZE (to_mode))
387 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
388 from = gen_lowpart (to_mode, from), from_mode = to_mode;
390 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
392 if (to_mode == from_mode
393 || (from_mode == VOIDmode && CONSTANT_P (from)))
395 emit_move_insn (to, from);
399 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
401 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
403 if (VECTOR_MODE_P (to_mode))
404 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
406 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
408 emit_move_insn (to, from);
412 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
414 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
415 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
424 gcc_assert ((GET_MODE_PRECISION (from_mode)
425 != GET_MODE_PRECISION (to_mode))
426 || (DECIMAL_FLOAT_MODE_P (from_mode)
427 != DECIMAL_FLOAT_MODE_P (to_mode)));
429 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
430 /* Conversion between decimal float and binary float, same size. */
431 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
432 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
437 /* Try converting directly if the insn is supported. */
439 code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
440 if (code != CODE_FOR_nothing)
442 emit_unop_insn (code, to, from,
443 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
447 /* Otherwise use a libcall. */
448 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
450 /* Is this conversion implemented yet? */
451 gcc_assert (libcall);
454 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
456 insns = get_insns ();
458 emit_libcall_block (insns, to, value,
459 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
461 : gen_rtx_FLOAT_EXTEND (to_mode, from));
465 /* Handle pointer conversion. */ /* SPEE 900220. */
466 /* Targets are expected to provide conversion insns between PxImode and
467 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
468 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
470 enum machine_mode full_mode
471 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
473 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
474 != CODE_FOR_nothing);
476 if (full_mode != from_mode)
477 from = convert_to_mode (full_mode, from, unsignedp);
478 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
482 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
485 enum machine_mode full_mode
486 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
488 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
489 != CODE_FOR_nothing);
491 if (to_mode == full_mode)
493 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
498 new_from = gen_reg_rtx (full_mode);
499 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
500 new_from, from, UNKNOWN);
502 /* else proceed to integer conversions below. */
503 from_mode = full_mode;
507 /* Make sure both are fixed-point modes or both are not. */
508 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
509 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
510 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
512 /* If we widen from_mode to to_mode and they are in the same class,
513 we won't saturate the result.
514 Otherwise, always saturate the result to play safe. */
515 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
516 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
517 expand_fixed_convert (to, from, 0, 0);
519 expand_fixed_convert (to, from, 0, 1);
523 /* Now both modes are integers. */
525 /* Handle expanding beyond a word. */
526 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
527 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
534 enum machine_mode lowpart_mode;
535 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
537 /* Try converting directly if the insn is supported. */
538 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
541 /* If FROM is a SUBREG, put it into a register. Do this
542 so that we always generate the same set of insns for
543 better cse'ing; if an intermediate assignment occurred,
544 we won't be doing the operation directly on the SUBREG. */
545 if (optimize > 0 && GET_CODE (from) == SUBREG)
546 from = force_reg (from_mode, from);
547 emit_unop_insn (code, to, from, equiv_code);
550 /* Next, try converting via full word. */
551 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
552 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
553 != CODE_FOR_nothing))
555 rtx word_to = gen_reg_rtx (word_mode);
558 if (reg_overlap_mentioned_p (to, from))
559 from = force_reg (from_mode, from);
562 convert_move (word_to, from, unsignedp);
563 emit_unop_insn (code, to, word_to, equiv_code);
567 /* No special multiword conversion insn; do it by hand. */
570 /* Since we will turn this into a no conflict block, we must ensure
571 that the source does not overlap the target. */
573 if (reg_overlap_mentioned_p (to, from))
574 from = force_reg (from_mode, from);
576 /* Get a copy of FROM widened to a word, if necessary. */
577 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
578 lowpart_mode = word_mode;
580 lowpart_mode = from_mode;
582 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
584 lowpart = gen_lowpart (lowpart_mode, to);
585 emit_move_insn (lowpart, lowfrom);
587 /* Compute the value to put in each remaining word. */
589 fill_value = const0_rtx;
594 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
595 && STORE_FLAG_VALUE == -1)
597 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
599 fill_value = gen_reg_rtx (word_mode);
600 emit_insn (gen_slt (fill_value));
606 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
607 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
609 fill_value = convert_to_mode (word_mode, fill_value, 1);
613 /* Fill the remaining words. */
614 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
616 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
617 rtx subword = operand_subword (to, index, 1, to_mode);
619 gcc_assert (subword);
621 if (fill_value != subword)
622 emit_move_insn (subword, fill_value);
625 insns = get_insns ();
632 /* Truncating multi-word to a word or less. */
633 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
634 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
637 && ! MEM_VOLATILE_P (from)
638 && direct_load[(int) to_mode]
639 && ! mode_dependent_address_p (XEXP (from, 0)))
641 || GET_CODE (from) == SUBREG))
642 from = force_reg (from_mode, from);
643 convert_move (to, gen_lowpart (word_mode, from), 0);
647 /* Now follow all the conversions between integers
648 no more than a word long. */
650 /* For truncation, usually we can just refer to FROM in a narrower mode. */
651 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
652 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
653 GET_MODE_BITSIZE (from_mode)))
656 && ! MEM_VOLATILE_P (from)
657 && direct_load[(int) to_mode]
658 && ! mode_dependent_address_p (XEXP (from, 0)))
660 || GET_CODE (from) == SUBREG))
661 from = force_reg (from_mode, from);
662 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
663 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
664 from = copy_to_reg (from);
665 emit_move_insn (to, gen_lowpart (to_mode, from));
669 /* Handle extension. */
670 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
672 /* Convert directly if that works. */
673 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
676 emit_unop_insn (code, to, from, equiv_code);
681 enum machine_mode intermediate;
685 /* Search for a mode to convert via. */
686 for (intermediate = from_mode; intermediate != VOIDmode;
687 intermediate = GET_MODE_WIDER_MODE (intermediate))
688 if (((can_extend_p (to_mode, intermediate, unsignedp)
690 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
691 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
692 GET_MODE_BITSIZE (intermediate))))
693 && (can_extend_p (intermediate, from_mode, unsignedp)
694 != CODE_FOR_nothing))
696 convert_move (to, convert_to_mode (intermediate, from,
697 unsignedp), unsignedp);
701 /* No suitable intermediate mode.
702 Generate what we need with shifts. */
703 shift_amount = build_int_cst (NULL_TREE,
704 GET_MODE_BITSIZE (to_mode)
705 - GET_MODE_BITSIZE (from_mode));
706 from = gen_lowpart (to_mode, force_reg (from_mode, from));
707 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
709 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
712 emit_move_insn (to, tmp);
717 /* Support special truncate insns for certain modes. */
718 if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
720 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
725 /* Handle truncation of volatile memrefs, and so on;
726 the things that couldn't be truncated directly,
727 and for which there was no special instruction.
729 ??? Code above formerly short-circuited this, for most integer
730 mode pairs, with a force_reg in from_mode followed by a recursive
731 call to this routine. Appears always to have been wrong. */
732 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
734 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
735 emit_move_insn (to, temp);
739 /* Mode combination is not recognized. */
743 /* Return an rtx for a value that would result
744 from converting X to mode MODE.
745 Both X's mode and MODE may be floating, or both may be integer.
746 UNSIGNEDP is nonzero if X is an unsigned value.
747 This can be done by referring to a part of X in place
748 or by copying to a new temporary with conversion. */
751 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
753 return convert_modes (mode, VOIDmode, x, unsignedp);
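/* Illustrative sketch of the common conversion idiom, assuming we are
   inside RTL expansion with a valid QImode pseudo; the helper name is
   hypothetical.  UNSIGNEDP == 1 requests zero-extension.  */
static rtx ATTRIBUTE_UNUSED
widen_qi_to_si_example (rtx qi_reg)
{
  rtx si_reg = gen_reg_rtx (SImode);
  convert_move (si_reg, qi_reg, 1);
  /* Equivalently: si_reg = convert_to_mode (SImode, qi_reg, 1);  */
  return si_reg;
}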
756 /* Return an rtx for a value that would result
757 from converting X from mode OLDMODE to mode MODE.
758 Both modes may be floating, or both integer.
759 UNSIGNEDP is nonzero if X is an unsigned value.
761 This can be done by referring to a part of X in place
762 or by copying to a new temporary with conversion.
764 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
767 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
771 /* If X is a SUBREG that indicates that we have already done at least
772 the required extension, strip it. */
774 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
775 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
776 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
777 x = gen_lowpart (mode, x);
779 if (GET_MODE (x) != VOIDmode)
780 oldmode = GET_MODE (x);
785 /* There is one case that we must handle specially: If we are converting
786 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
787 we are to interpret the constant as unsigned, gen_lowpart will do
788 the wrong thing if the constant appears negative. What we want to do is
789 make the high-order word of the constant zero, not all ones. */
791 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
792 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
793 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
795 HOST_WIDE_INT val = INTVAL (x);
797 if (oldmode != VOIDmode
798 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
800 int width = GET_MODE_BITSIZE (oldmode);
802 /* We need to zero extend VAL. */
803 val &= ((HOST_WIDE_INT) 1 << width) - 1;
806 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
809 /* We can do this with a gen_lowpart if both desired and current modes
810 are integer, and this is either a constant integer, a register, or a
811 non-volatile MEM. Except for the constant case where MODE is no
812 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
814 if ((GET_CODE (x) == CONST_INT
815 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
816 || (GET_MODE_CLASS (mode) == MODE_INT
817 && GET_MODE_CLASS (oldmode) == MODE_INT
818 && (GET_CODE (x) == CONST_DOUBLE
819 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
820 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
821 && direct_load[(int) mode])
823 && (! HARD_REGISTER_P (x)
824 || HARD_REGNO_MODE_OK (REGNO (x), mode))
825 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
826 GET_MODE_BITSIZE (GET_MODE (x)))))))))
828 /* ??? If we don't know OLDMODE, we have to assume here that
829 X does not need sign- or zero-extension. This may not be
830 the case, but it's the best we can do. */
831 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
832 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
834 HOST_WIDE_INT val = INTVAL (x);
835 int width = GET_MODE_BITSIZE (oldmode);
837 /* We must sign or zero-extend in this case. Start by
838 zero-extending, then sign extend if we need to. */
839 val &= ((HOST_WIDE_INT) 1 << width) - 1;
841 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
842 val |= (HOST_WIDE_INT) (-1) << width;
844 return gen_int_mode (val, mode);
847 return gen_lowpart (mode, x);
850 /* Converting an integer constant into a mode is always equivalent to a subreg operation. */
852 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
854 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
855 return simplify_gen_subreg (mode, x, oldmode, 0);
858 temp = gen_reg_rtx (mode);
859 convert_move (temp, x, unsignedp);
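/* Illustrative sketch: CONST_INTs carry VOIDmode, so OLDMODE is the
   only way to tell convert_modes how wide a constant conceptually is.
   This hypothetical helper widens 0xff as an unsigned QImode value;
   the constant case emits no insns.  */
static rtx ATTRIBUTE_UNUSED
widen_unsigned_char_constant_example (void)
{
  return convert_modes (SImode, QImode, GEN_INT (0xff), 1);
}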
863 /* STORE_MAX_PIECES is the number of bytes at a time that we can
864 store efficiently. Due to internal GCC limitations, this is
865 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
866 for an immediate constant. */
868 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
870 /* Determine whether the LEN bytes can be moved by using several move
871 instructions. Return nonzero if a call to move_by_pieces should succeed. */
875 can_move_by_pieces (unsigned HOST_WIDE_INT len,
876 unsigned int align ATTRIBUTE_UNUSED)
878 return MOVE_BY_PIECES_P (len, align);
881 /* Generate several move instructions to copy LEN bytes from block FROM to
882 block TO. (These are MEM rtx's with BLKmode).
884 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
885 used to push FROM to the stack.
887 ALIGN is maximum stack alignment we can assume.
889 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
890 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
894 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
895 unsigned int align, int endp)
897 struct move_by_pieces data;
898 rtx to_addr, from_addr = XEXP (from, 0);
899 unsigned int max_size = MOVE_MAX_PIECES + 1;
900 enum machine_mode mode = VOIDmode, tmode;
901 enum insn_code icode;
903 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
906 data.from_addr = from_addr;
909 to_addr = XEXP (to, 0);
912 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
913 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
915 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
922 #ifdef STACK_GROWS_DOWNWARD
928 data.to_addr = to_addr;
931 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
932 || GET_CODE (from_addr) == POST_INC
933 || GET_CODE (from_addr) == POST_DEC);
935 data.explicit_inc_from = 0;
936 data.explicit_inc_to = 0;
937 if (data.reverse) data.offset = len;
940 /* If copying requires more than two move insns,
941 copy addresses to registers (to make displacements shorter)
942 and use post-increment if available. */
943 if (!(data.autinc_from && data.autinc_to)
944 && move_by_pieces_ninsns (len, align, max_size) > 2)
946 /* Find the mode of the largest move... */
947 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
948 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
949 if (GET_MODE_SIZE (tmode) < max_size)
952 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
954 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
955 data.autinc_from = 1;
956 data.explicit_inc_from = -1;
958 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
960 data.from_addr = copy_addr_to_reg (from_addr);
961 data.autinc_from = 1;
962 data.explicit_inc_from = 1;
964 if (!data.autinc_from && CONSTANT_P (from_addr))
965 data.from_addr = copy_addr_to_reg (from_addr);
966 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
968 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
970 data.explicit_inc_to = -1;
972 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
974 data.to_addr = copy_addr_to_reg (to_addr);
976 data.explicit_inc_to = 1;
978 if (!data.autinc_to && CONSTANT_P (to_addr))
979 data.to_addr = copy_addr_to_reg (to_addr);
982 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
983 if (align >= GET_MODE_ALIGNMENT (tmode))
984 align = GET_MODE_ALIGNMENT (tmode);
987 enum machine_mode xmode;
989 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
991 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
992 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
993 || SLOW_UNALIGNED_ACCESS (tmode, align))
996 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
999 /* First move what we can in the largest integer mode, then go to
1000 successively smaller modes. */
1002 while (max_size > 1)
1004 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1005 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1006 if (GET_MODE_SIZE (tmode) < max_size)
1009 if (mode == VOIDmode)
1012 icode = optab_handler (mov_optab, mode)->insn_code;
1013 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1014 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1016 max_size = GET_MODE_SIZE (mode);
1019 /* The code above should have handled everything. */
1020 gcc_assert (!data.len);
1026 gcc_assert (!data.reverse);
1031 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1032 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1034 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
1037 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1044 to1 = adjust_address (data.to, QImode, data.offset);
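/* Illustrative sketch of the expected calling convention, with a
   hypothetical helper and a made-up length: check the cost cutoff
   first, then expand the copy inline.  ENDP == 0 means the return
   value is not needed.  */
static void ATTRIBUTE_UNUSED
copy_small_block_example (rtx dst, rtx src)
{
  unsigned HOST_WIDE_INT len = 8;
  unsigned int align = MIN (MEM_ALIGN (dst), MEM_ALIGN (src));

  if (can_move_by_pieces (len, align))
    move_by_pieces (dst, src, len, align, 0);
}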
1052 /* Return number of insns required to move L bytes by pieces.
1053 ALIGN (in bits) is maximum alignment we can assume. */
1055 static unsigned HOST_WIDE_INT
1056 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1057 unsigned int max_size)
1059 unsigned HOST_WIDE_INT n_insns = 0;
1060 enum machine_mode tmode;
1062 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1063 if (align >= GET_MODE_ALIGNMENT (tmode))
1064 align = GET_MODE_ALIGNMENT (tmode);
1067 enum machine_mode tmode, xmode;
1069 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1071 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1072 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1073 || SLOW_UNALIGNED_ACCESS (tmode, align))
1076 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1079 while (max_size > 1)
1081 enum machine_mode mode = VOIDmode;
1082 enum insn_code icode;
1084 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1085 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1086 if (GET_MODE_SIZE (tmode) < max_size)
1089 if (mode == VOIDmode)
1092 icode = optab_handler (mov_optab, mode)->insn_code;
1093 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1094 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1096 max_size = GET_MODE_SIZE (mode);
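/* Worked example (illustrative, assuming MOVE_MAX_PIECES == 8 and
   full alignment): for L == 11 the loop charges 11/8 = 1 DImode move
   (leaving 3 bytes), then 3/4 = 0, 3/2 = 1 HImode move (leaving 1),
   and 1/1 = 1 QImode move, so move_by_pieces_ninsns returns 3.  */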
1103 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1104 with move instructions for mode MODE. GENFUN is the gen_... function
1105 to make a move insn for that mode. DATA has all the other info. */
1108 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1109 struct move_by_pieces *data)
1111 unsigned int size = GET_MODE_SIZE (mode);
1112 rtx to1 = NULL_RTX, from1;
1114 while (data->len >= size)
1117 data->offset -= size;
1121 if (data->autinc_to)
1122 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1125 to1 = adjust_address (data->to, mode, data->offset);
1128 if (data->autinc_from)
1129 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1132 from1 = adjust_address (data->from, mode, data->offset);
1134 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1135 emit_insn (gen_add2_insn (data->to_addr,
1136 GEN_INT (-(HOST_WIDE_INT)size)));
1137 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1138 emit_insn (gen_add2_insn (data->from_addr,
1139 GEN_INT (-(HOST_WIDE_INT)size)));
1142 emit_insn ((*genfun) (to1, from1));
1145 #ifdef PUSH_ROUNDING
1146 emit_single_push_insn (mode, from1, NULL);
1152 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1153 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1154 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1155 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1157 if (! data->reverse)
1158 data->offset += size;
1164 /* Emit code to move a block Y to a block X. This may be done with
1165 string-move instructions, with multiple scalar move instructions,
1166 or with a library call.
1168 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1169 SIZE is an rtx that says how long they are.
1170 ALIGN is the maximum alignment we can assume they have.
1171 METHOD describes what kind of copy this is, and what mechanisms may be used.
1173 Return the address of the new block, if memcpy is called and returns it, 0 otherwise. */
1177 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1178 unsigned int expected_align, HOST_WIDE_INT expected_size)
1186 case BLOCK_OP_NORMAL:
1187 case BLOCK_OP_TAILCALL:
1188 may_use_call = true;
1191 case BLOCK_OP_CALL_PARM:
1192 may_use_call = block_move_libcall_safe_for_call_parm ();
1194 /* Make inhibit_defer_pop nonzero around the library call
1195 to force it to pop the arguments right away. */
1199 case BLOCK_OP_NO_LIBCALL:
1200 may_use_call = false;
1207 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1209 gcc_assert (MEM_P (x));
1210 gcc_assert (MEM_P (y));
1213 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1214 block copy is more efficient for other large modes, e.g. DCmode. */
1215 x = adjust_address (x, BLKmode, 0);
1216 y = adjust_address (y, BLKmode, 0);
1218 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1219 can be incorrect is coming from __builtin_memcpy. */
1220 if (GET_CODE (size) == CONST_INT)
1222 if (INTVAL (size) == 0)
1225 x = shallow_copy_rtx (x);
1226 y = shallow_copy_rtx (y);
1227 set_mem_size (x, size);
1228 set_mem_size (y, size);
1231 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1232 move_by_pieces (x, y, INTVAL (size), align, 0);
1233 else if (emit_block_move_via_movmem (x, y, size, align,
1234 expected_align, expected_size))
1236 else if (may_use_call)
1237 retval = emit_block_move_via_libcall (x, y, size,
1238 method == BLOCK_OP_TAILCALL);
1240 emit_block_move_via_loop (x, y, size, align);
1242 if (method == BLOCK_OP_CALL_PARM)
1249 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1251 return emit_block_move_hints (x, y, size, method, 0, -1);
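/* Illustrative sketch: the typical way expansion code requests a
   block copy.  DST and SRC must be BLKmode MEMs; the helper name is
   hypothetical.  */
static void ATTRIBUTE_UNUSED
emit_block_copy_example (rtx dst, rtx src, HOST_WIDE_INT nbytes)
{
  emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}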
1254 /* A subroutine of emit_block_move. Returns true if calling the
1255 block move libcall will not clobber any parameters which may have
1256 already been placed on the stack. */
1259 block_move_libcall_safe_for_call_parm (void)
1261 #if defined (REG_PARM_STACK_SPACE)
1265 /* If arguments are pushed on the stack, then they're safe. */
1269 /* If registers go on the stack anyway, any argument is sure to clobber
1270 an outgoing argument. */
1271 #if defined (REG_PARM_STACK_SPACE)
1272 fn = emit_block_move_libcall_fn (false);
1273 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1274 && REG_PARM_STACK_SPACE (fn) != 0)
1278 /* If any argument goes in memory, then it might clobber an outgoing argument. */
1281 CUMULATIVE_ARGS args_so_far;
1284 fn = emit_block_move_libcall_fn (false);
1285 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1287 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1288 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1290 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1291 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1292 if (!tmp || !REG_P (tmp))
1294 if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1296 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1302 /* A subroutine of emit_block_move. Expand a movmem pattern;
1303 return true if successful. */
1306 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1307 unsigned int expected_align, HOST_WIDE_INT expected_size)
1309 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1310 int save_volatile_ok = volatile_ok;
1311 enum machine_mode mode;
1313 if (expected_align < align)
1314 expected_align = align;
1316 /* Since this is a move insn, we don't care about volatility. */
1319 /* Try the most limited insn first, because there's no point
1320 including more than one in the machine description unless
1321 the more limited one has some advantage. */
1323 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1324 mode = GET_MODE_WIDER_MODE (mode))
1326 enum insn_code code = movmem_optab[(int) mode];
1327 insn_operand_predicate_fn pred;
1329 if (code != CODE_FOR_nothing
1330 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1331 here because if SIZE is less than the mode mask, as it is
1332 returned by the macro, it will definitely be less than the
1333 actual mode mask. */
1334 && ((GET_CODE (size) == CONST_INT
1335 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1336 <= (GET_MODE_MASK (mode) >> 1)))
1337 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1338 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1339 || (*pred) (x, BLKmode))
1340 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1341 || (*pred) (y, BLKmode))
1342 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1343 || (*pred) (opalign, VOIDmode)))
1346 rtx last = get_last_insn ();
1349 op2 = convert_to_mode (mode, size, 1);
1350 pred = insn_data[(int) code].operand[2].predicate;
1351 if (pred != 0 && ! (*pred) (op2, mode))
1352 op2 = copy_to_mode_reg (mode, op2);
1354 /* ??? When called via emit_block_move_for_call, it'd be
1355 nice if there were some way to inform the backend, so
1356 that it doesn't fail the expansion because it thinks
1357 emitting the libcall would be more efficient. */
1359 if (insn_data[(int) code].n_operands == 4)
1360 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1362 pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1363 GEN_INT (expected_align
1365 GEN_INT (expected_size));
1369 volatile_ok = save_volatile_ok;
1373 delete_insns_since (last);
1377 volatile_ok = save_volatile_ok;
1381 /* A subroutine of emit_block_move. Expand a call to memcpy.
1382 Return the return value from memcpy, 0 otherwise. */
1385 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1387 rtx dst_addr, src_addr;
1388 tree call_expr, fn, src_tree, dst_tree, size_tree;
1389 enum machine_mode size_mode;
1392 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1393 pseudos. We can then place those new pseudos into a VAR_DECL and use them later. */
1396 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1397 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1399 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1400 src_addr = convert_memory_address (ptr_mode, src_addr);
1402 dst_tree = make_tree (ptr_type_node, dst_addr);
1403 src_tree = make_tree (ptr_type_node, src_addr);
1405 size_mode = TYPE_MODE (sizetype);
1407 size = convert_to_mode (size_mode, size, 1);
1408 size = copy_to_mode_reg (size_mode, size);
1410 /* It is incorrect to use the libcall calling conventions to call
1411 memcpy in this context. This could be a user call to memcpy and
1412 the user may wish to examine the return value from memcpy. For
1413 targets where libcalls and normal calls have different conventions
1414 for returning pointers, we could end up generating incorrect code. */
1416 size_tree = make_tree (sizetype, size);
1418 fn = emit_block_move_libcall_fn (true);
1419 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1420 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1422 retval = expand_normal (call_expr);
1427 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1428 for the function we use for block copies. The first time FOR_CALL
1429 is true, we call assemble_external. */
1431 static GTY(()) tree block_move_fn;
1434 init_block_move_fn (const char *asmspec)
1440 fn = get_identifier ("memcpy");
1441 args = build_function_type_list (ptr_type_node, ptr_type_node,
1442 const_ptr_type_node, sizetype,
1445 fn = build_decl (FUNCTION_DECL, fn, args);
1446 DECL_EXTERNAL (fn) = 1;
1447 TREE_PUBLIC (fn) = 1;
1448 DECL_ARTIFICIAL (fn) = 1;
1449 TREE_NOTHROW (fn) = 1;
1450 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1451 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1457 set_user_assembler_name (block_move_fn, asmspec);
1461 emit_block_move_libcall_fn (int for_call)
1463 static bool emitted_extern;
1466 init_block_move_fn (NULL);
1468 if (for_call && !emitted_extern)
1470 emitted_extern = true;
1471 make_decl_rtl (block_move_fn);
1472 assemble_external (block_move_fn);
1475 return block_move_fn;
1478 /* A subroutine of emit_block_move. Copy the data via an explicit
1479 loop. This is used only when libcalls are forbidden. */
1480 /* ??? It'd be nice to copy in hunks larger than QImode. */
1483 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1484 unsigned int align ATTRIBUTE_UNUSED)
1486 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1487 enum machine_mode iter_mode;
1489 iter_mode = GET_MODE (size);
1490 if (iter_mode == VOIDmode)
1491 iter_mode = word_mode;
1493 top_label = gen_label_rtx ();
1494 cmp_label = gen_label_rtx ();
1495 iter = gen_reg_rtx (iter_mode);
1497 emit_move_insn (iter, const0_rtx);
1499 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1500 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1501 do_pending_stack_adjust ();
1503 emit_jump (cmp_label);
1504 emit_label (top_label);
1506 tmp = convert_modes (Pmode, iter_mode, iter, true);
1507 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1508 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1509 x = change_address (x, QImode, x_addr);
1510 y = change_address (y, QImode, y_addr);
1512 emit_move_insn (x, y);
1514 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1515 true, OPTAB_LIB_WIDEN);
1517 emit_move_insn (iter, tmp);
1519 emit_label (cmp_label);
1521 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
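/* The RTL emitted above corresponds to this C skeleton (illustrative;
   the copy is done one QImode byte at a time):

     iter = 0;
     goto cmp;
   top:
     dst[iter] = src[iter];
     iter += 1;
   cmp:
     if (iter < size) goto top;  */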
1525 /* Copy all or part of a value X into registers starting at REGNO.
1526 The number of registers to be filled is NREGS. */
1529 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1532 #ifdef HAVE_load_multiple
1540 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1541 x = validize_mem (force_const_mem (mode, x));
1543 /* See if the machine can do this with a load multiple insn. */
1544 #ifdef HAVE_load_multiple
1545 if (HAVE_load_multiple)
1547 last = get_last_insn ();
1548 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1556 delete_insns_since (last);
1560 for (i = 0; i < nregs; i++)
1561 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1562 operand_subword_force (x, i, mode));
1565 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1566 The number of registers to be filled is NREGS. */
1569 move_block_from_reg (int regno, rtx x, int nregs)
1576 /* See if the machine can do this with a store multiple insn. */
1577 #ifdef HAVE_store_multiple
1578 if (HAVE_store_multiple)
1580 rtx last = get_last_insn ();
1581 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1589 delete_insns_since (last);
1593 for (i = 0; i < nregs; i++)
1595 rtx tem = operand_subword (x, i, 1, BLKmode);
1599 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1603 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1604 ORIG, where ORIG is a non-consecutive group of registers represented by
1605 a PARALLEL. The clone is identical to the original except in that the
1606 original set of registers is replaced by a new set of pseudo registers.
1607 The new set has the same modes as the original set. */
1610 gen_group_rtx (rtx orig)
1615 gcc_assert (GET_CODE (orig) == PARALLEL);
1617 length = XVECLEN (orig, 0);
1618 tmps = XALLOCAVEC (rtx, length);
1620 /* Skip a NULL entry in first slot. */
1621 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1626 for (; i < length; i++)
1628 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1629 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1631 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1634 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
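/* Illustrative example of the shape involved, assuming a 16-byte
   value passed in two DImode registers at offsets 0 and 8:

     (parallel [(expr_list (reg:DI 100) (const_int 0))
		(expr_list (reg:DI 101) (const_int 8))])

   gen_group_rtx clones this, replacing each register with a fresh
   pseudo of the same mode.  */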
1637 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1638 except that values are placed in TMPS[i], and must later be moved
1639 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1642 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1646 enum machine_mode m = GET_MODE (orig_src);
1648 gcc_assert (GET_CODE (dst) == PARALLEL);
1651 && !SCALAR_INT_MODE_P (m)
1652 && !MEM_P (orig_src)
1653 && GET_CODE (orig_src) != CONCAT)
1655 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1656 if (imode == BLKmode)
1657 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1659 src = gen_reg_rtx (imode);
1660 if (imode != BLKmode)
1661 src = gen_lowpart (GET_MODE (orig_src), src);
1662 emit_move_insn (src, orig_src);
1663 /* ...and back again. */
1664 if (imode != BLKmode)
1665 src = gen_lowpart (imode, src);
1666 emit_group_load_1 (tmps, dst, src, type, ssize);
1670 /* Check for a NULL entry, used to indicate that the parameter goes
1671 both on the stack and in registers. */
1672 if (XEXP (XVECEXP (dst, 0, 0), 0))
1677 /* Process the pieces. */
1678 for (i = start; i < XVECLEN (dst, 0); i++)
1680 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1681 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1682 unsigned int bytelen = GET_MODE_SIZE (mode);
1685 /* Handle trailing fragments that run over the size of the struct. */
1686 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1688 /* Arrange to shift the fragment to where it belongs.
1689 extract_bit_field loads to the lsb of the reg. */
1691 #ifdef BLOCK_REG_PADDING
1692 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1693 == (BYTES_BIG_ENDIAN ? upward : downward)
1698 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1699 bytelen = ssize - bytepos;
1700 gcc_assert (bytelen > 0);
1703 /* If we won't be loading directly from memory, protect the real source
1704 from strange tricks we might play; but make sure that the source can
1705 be loaded directly into the destination. */
1707 if (!MEM_P (orig_src)
1708 && (!CONSTANT_P (orig_src)
1709 || (GET_MODE (orig_src) != mode
1710 && GET_MODE (orig_src) != VOIDmode)))
1712 if (GET_MODE (orig_src) == VOIDmode)
1713 src = gen_reg_rtx (mode);
1715 src = gen_reg_rtx (GET_MODE (orig_src));
1717 emit_move_insn (src, orig_src);
1720 /* Optimize the access just a bit. */
1722 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1723 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1724 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1725 && bytelen == GET_MODE_SIZE (mode))
1727 tmps[i] = gen_reg_rtx (mode);
1728 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1730 else if (COMPLEX_MODE_P (mode)
1731 && GET_MODE (src) == mode
1732 && bytelen == GET_MODE_SIZE (mode))
1733 /* Let emit_move_complex do the bulk of the work. */
1735 else if (GET_CODE (src) == CONCAT)
1737 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1738 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1740 if ((bytepos == 0 && bytelen == slen0)
1741 || (bytepos != 0 && bytepos + bytelen <= slen))
1743 /* The following assumes that the concatenated objects all
1744 have the same size. In this case, a simple calculation
1745 can be used to determine the object and the bit field to be extracted. */
1747 tmps[i] = XEXP (src, bytepos / slen0);
1748 if (! CONSTANT_P (tmps[i])
1749 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1750 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1751 (bytepos % slen0) * BITS_PER_UNIT,
1752 1, NULL_RTX, mode, mode);
1758 gcc_assert (!bytepos);
1759 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1760 emit_move_insn (mem, src);
1761 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1762 0, 1, NULL_RTX, mode, mode);
1765 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1766 SIMD register, which is currently broken. While we get GCC
1767 to emit proper RTL for these cases, let's dump to memory. */
1768 else if (VECTOR_MODE_P (GET_MODE (dst))
1771 int slen = GET_MODE_SIZE (GET_MODE (src));
1774 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1775 emit_move_insn (mem, src);
1776 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1778 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1779 && XVECLEN (dst, 0) > 1)
1780 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1781 else if (CONSTANT_P (src))
1783 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1791 gcc_assert (2 * len == ssize);
1792 split_double (src, &first, &second);
1799 else if (REG_P (src) && GET_MODE (src) == mode)
1802 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1803 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1807 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1808 build_int_cst (NULL_TREE, shift), tmps[i], 0);
1812 /* Emit code to move a block SRC of type TYPE to a block DST,
1813 where DST is non-consecutive registers represented by a PARALLEL.
1814 SSIZE represents the total size of block ORIG_SRC in bytes, or -1 if not known. */
1818 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1823 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1824 emit_group_load_1 (tmps, dst, src, type, ssize);
1826 /* Copy the extracted pieces into the proper (probable) hard regs. */
1827 for (i = 0; i < XVECLEN (dst, 0); i++)
1829 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1832 emit_move_insn (d, tmps[i]);
1836 /* Similar, but load SRC into new pseudos in a format that looks like
1837 PARALLEL. This can later be fed to emit_group_move to get things
1838 in the right place. */
1841 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1846 vec = rtvec_alloc (XVECLEN (parallel, 0));
1847 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1849 /* Convert the vector to look just like the original PARALLEL, except
1850 with the computed values. */
1851 for (i = 0; i < XVECLEN (parallel, 0); i++)
1853 rtx e = XVECEXP (parallel, 0, i);
1854 rtx d = XEXP (e, 0);
1858 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1859 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1861 RTVEC_ELT (vec, i) = e;
1864 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1867 /* Emit code to move a block SRC to block DST, where SRC and DST are
1868 non-consecutive groups of registers, each represented by a PARALLEL. */
1871 emit_group_move (rtx dst, rtx src)
1875 gcc_assert (GET_CODE (src) == PARALLEL
1876 && GET_CODE (dst) == PARALLEL
1877 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1879 /* Skip first entry if NULL. */
1880 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1881 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1882 XEXP (XVECEXP (src, 0, i), 0));
1885 /* Move a group of registers represented by a PARALLEL into pseudos. */
1888 emit_group_move_into_temps (rtx src)
1890 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1893 for (i = 0; i < XVECLEN (src, 0); i++)
1895 rtx e = XVECEXP (src, 0, i);
1896 rtx d = XEXP (e, 0);
1899 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1900 RTVEC_ELT (vec, i) = e;
1903 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1906 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1907 where SRC is non-consecutive registers represented by a PARALLEL.
1908 SSIZE represents the total size of block ORIG_DST, or -1 if not known. */
1912 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1915 int start, finish, i;
1916 enum machine_mode m = GET_MODE (orig_dst);
1918 gcc_assert (GET_CODE (src) == PARALLEL);
1920 if (!SCALAR_INT_MODE_P (m)
1921 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1923 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1924 if (imode == BLKmode)
1925 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1927 dst = gen_reg_rtx (imode);
1928 emit_group_store (dst, src, type, ssize);
1929 if (imode != BLKmode)
1930 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1931 emit_move_insn (orig_dst, dst);
1935 /* Check for a NULL entry, used to indicate that the parameter goes
1936 both on the stack and in registers. */
1937 if (XEXP (XVECEXP (src, 0, 0), 0))
1941 finish = XVECLEN (src, 0);
1943 tmps = XALLOCAVEC (rtx, finish);
1945 /* Copy the (probable) hard regs into pseudos. */
1946 for (i = start; i < finish; i++)
1948 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1949 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1951 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1952 emit_move_insn (tmps[i], reg);
1958 /* If we won't be storing directly into memory, protect the real destination
1959 from strange tricks we might play. */
1961 if (GET_CODE (dst) == PARALLEL)
1965 /* We can get a PARALLEL dst if there is a conditional expression in
1966 a return statement. In that case, the dst and src are the same,
1967 so no action is necessary. */
1968 if (rtx_equal_p (dst, src))
1971 /* It is unclear if we can ever reach here, but we may as well handle
1972 it. Allocate a temporary, and split this into a store/load to/from memory. */
1975 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1976 emit_group_store (temp, src, type, ssize);
1977 emit_group_load (dst, temp, type, ssize);
1980 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1982 enum machine_mode outer = GET_MODE (dst);
1983 enum machine_mode inner;
1984 HOST_WIDE_INT bytepos;
1988 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1989 dst = gen_reg_rtx (outer);
1991 /* Make life a bit easier for combine. */
1992 /* If the first element of the vector is the low part
1993 of the destination mode, use a paradoxical subreg to
1994 initialize the destination. */
1997 inner = GET_MODE (tmps[start]);
1998 bytepos = subreg_lowpart_offset (inner, outer);
1999 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2001 temp = simplify_gen_subreg (outer, tmps[start],
2005 emit_move_insn (dst, temp);
2012 /* If the first element wasn't the low part, try the last. */
2014 && start < finish - 1)
2016 inner = GET_MODE (tmps[finish - 1]);
2017 bytepos = subreg_lowpart_offset (inner, outer);
2018 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2020 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2024 emit_move_insn (dst, temp);
2031 /* Otherwise, simply initialize the result to zero. */
2033 emit_move_insn (dst, CONST0_RTX (outer));
2036 /* Process the pieces. */
2037 for (i = start; i < finish; i++)
2039 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2040 enum machine_mode mode = GET_MODE (tmps[i]);
2041 unsigned int bytelen = GET_MODE_SIZE (mode);
2044 /* Handle trailing fragments that run over the size of the struct. */
2045 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2047 /* store_bit_field always takes its value from the lsb.
2048 Move the fragment to the lsb if it's not already there. */
2050 #ifdef BLOCK_REG_PADDING
2051 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2052 == (BYTES_BIG_ENDIAN ? upward : downward)
2058 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2059 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2060 build_int_cst (NULL_TREE, shift),
2063 bytelen = ssize - bytepos;
2066 if (GET_CODE (dst) == CONCAT)
2068 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2069 dest = XEXP (dst, 0);
2070 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2072 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2073 dest = XEXP (dst, 1);
2077 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2078 dest = assign_stack_temp (GET_MODE (dest),
2079 GET_MODE_SIZE (GET_MODE (dest)), 0);
2080 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2087 /* Optimize the access just a bit. */
2089 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2090 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2091 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2092 && bytelen == GET_MODE_SIZE (mode))
2093 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2095 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2099 /* Copy from the pseudo into the (probable) hard reg. */
2100 if (orig_dst != dst)
2101 emit_move_insn (orig_dst, dst);
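/* Illustrative sketch (not part of the sources): a caller holding a
   function value in a PARALLEL of two SImode hard registers might
   spill it to a stack slot like so.  The register numbers, the 8-byte
   size and TYPE are hypothetical.

     rtx r0 = gen_rtx_REG (SImode, 0), r1 = gen_rtx_REG (SImode, 1);
     rtx par
       = gen_rtx_PARALLEL (BLKmode,
                           gen_rtvec (2,
                                      gen_rtx_EXPR_LIST (VOIDmode, r0,
                                                         GEN_INT (0)),
                                      gen_rtx_EXPR_LIST (VOIDmode, r1,
                                                         GEN_INT (4))));
     rtx slot = assign_stack_temp (BLKmode, 8, 0);
     emit_group_store (slot, par, type, 8);  */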
2104 /* Generate code to copy a BLKmode object of TYPE out of a
2105 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2106 is null, a stack temporary is created. TGTBLK is returned.
2108 The purpose of this routine is to handle functions that return
2109 BLKmode structures in registers. Some machines (the PA for example)
2110 want to return all small structures in registers regardless of the
2111 structure's alignment. */
2114 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2116 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2117 rtx src = NULL, dst = NULL;
2118 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2119 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2120 enum machine_mode copy_mode;
2124 tgtblk = assign_temp (build_qualified_type (type,
2125 (TYPE_QUALS (type)
2126 | TYPE_QUAL_CONST)),
2127 0, 1, 1);
2128 preserve_temp_slots (tgtblk);
2131 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2132 into a new pseudo which is a full word. */
2134 if (GET_MODE (srcreg) != BLKmode
2135 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2136 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2138 /* If the structure doesn't take up a whole number of words, see whether
2139 SRCREG is padded on the left or on the right. If it's on the left,
2140 set PADDING_CORRECTION to the number of bits to skip.
2142 In most ABIs, the structure will be returned at the least significant end of
2143 the register, which translates to right padding on little-endian
2144 targets and left padding on big-endian targets. The opposite
2145 holds if the structure is returned at the most significant
2146 end of the register. */
2147 if (bytes % UNITS_PER_WORD != 0
2148 && (targetm.calls.return_in_msb (type)
2150 : BYTES_BIG_ENDIAN))
2152 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2154 /* Copy the structure BITSIZE bits at a time. If the target lives in
2155 memory, take care of not reading/writing past its end by selecting
2156 a copy mode suited to BITSIZE.  This should always be possible given
2157 that BITSIZE is not greater than BITS_PER_WORD.
2159 We could probably emit more efficient code for machines which do not use
2160 strict alignment, but it doesn't seem worth the effort at the current
2161 time.  */
2163 copy_mode = word_mode;
2166 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2167 if (mem_mode != BLKmode)
2168 copy_mode = mem_mode;
2171 for (bitpos = 0, xbitpos = padding_correction;
2172 bitpos < bytes * BITS_PER_UNIT;
2173 bitpos += bitsize, xbitpos += bitsize)
2175 /* We need a new source operand each time xbitpos is on a
2176 word boundary and when xbitpos == padding_correction
2177 (the first time through). */
2178 if (xbitpos % BITS_PER_WORD == 0
2179 || xbitpos == padding_correction)
2180 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2183 /* We need a new destination operand each time bitpos is on
2185 if (bitpos % BITS_PER_WORD == 0)
2186 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2188 /* Use xbitpos for the source extraction (right justified) and
2189 bitpos for the destination store (left justified). */
2190 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2191 extract_bit_field (src, bitsize,
2192 xbitpos % BITS_PER_WORD, 1,
2193 NULL_RTX, copy_mode, copy_mode));
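/* Illustrative sketch (hypothetical register and type): unpacking a
   6-byte BLKmode struct returned in a DImode register.  Passing a
   null TGTBLK makes the routine allocate the stack temporary itself:

     rtx ret = gen_rtx_REG (DImode, 0);   (regno hypothetical)
     rtx blk = copy_blkmode_from_reg (NULL_RTX, ret, type);

   BLK then addresses a BLKmode slot holding the six meaningful bytes,
   with PADDING_CORRECTION having skipped any leading pad bits per the
   target's return_in_msb convention.  */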
2199 /* Add a USE expression for REG to the (possibly empty) list pointed
2200 to by CALL_FUSAGE. REG must denote a hard register. */
2203 use_reg (rtx *call_fusage, rtx reg)
2205 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2208 = gen_rtx_EXPR_LIST (VOIDmode,
2209 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2212 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2213 starting at REGNO. All of these registers must be hard registers. */
2216 use_regs (rtx *call_fusage, int regno, int nregs)
2220 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2222 for (i = 0; i < nregs; i++)
2223 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2226 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2227 PARALLEL REGS. This is for calls that pass values in multiple
2228 non-contiguous locations. The Irix 6 ABI has examples of this. */
2231 use_group_regs (rtx *call_fusage, rtx regs)
2235 for (i = 0; i < XVECLEN (regs, 0); i++)
2237 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2239 /* A NULL entry means the parameter goes both on the stack and in
2240 registers. This can also be a MEM for targets that pass values
2241 partially on the stack and partially in registers. */
2242 if (reg != 0 && REG_P (reg))
2243 use_reg (call_fusage, reg);
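/* Illustrative sketch: building the CALL_INSN_FUNCTION_USAGE list for
   a call passing one value in hard register 0 and a pair starting at
   hard register 2 (register numbers hypothetical):

     rtx fusage = NULL_RTX;
     use_reg (&fusage, gen_rtx_REG (SImode, 0));
     use_regs (&fusage, 2, 2);

   The caller (e.g. the expanders in calls.c) then attaches FUSAGE to
   the CALL_INSN it emits.  */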
2248 /* Determine whether the LEN bytes generated by CONSTFUN can be
2249 stored to memory using several move instructions. CONSTFUNDATA is
2250 a pointer which will be passed as argument in every CONSTFUN call.
2251 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2252 a memset operation and false if it's a copy of a constant string.
2253 Return nonzero if a call to store_by_pieces should succeed. */
2256 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2257 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2258 void *constfundata, unsigned int align, bool memsetp)
2260 unsigned HOST_WIDE_INT l;
2261 unsigned int max_size;
2262 HOST_WIDE_INT offset = 0;
2263 enum machine_mode mode, tmode;
2264 enum insn_code icode;
2272 ? SET_BY_PIECES_P (len, align)
2273 : STORE_BY_PIECES_P (len, align)))
2276 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2277 if (align >= GET_MODE_ALIGNMENT (tmode))
2278 align = GET_MODE_ALIGNMENT (tmode);
2281 enum machine_mode xmode;
2283 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2285 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2286 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2287 || SLOW_UNALIGNED_ACCESS (tmode, align))
2290 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2293 /* We would first store what we can in the largest integer mode, then go to
2294 successively smaller modes. */
2297 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2302 max_size = STORE_MAX_PIECES + 1;
2303 while (max_size > 1)
2305 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2306 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2307 if (GET_MODE_SIZE (tmode) < max_size)
2310 if (mode == VOIDmode)
2313 icode = optab_handler (mov_optab, mode)->insn_code;
2314 if (icode != CODE_FOR_nothing
2315 && align >= GET_MODE_ALIGNMENT (mode))
2317 unsigned int size = GET_MODE_SIZE (mode);
2324 cst = (*constfun) (constfundata, offset, mode);
2325 if (!LEGITIMATE_CONSTANT_P (cst))
2335 max_size = GET_MODE_SIZE (mode);
2338 /* The code above should have handled everything. */
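/* Illustrative sketch of the CONSTFUN protocol, in the spirit of the
   string callbacks in builtins.c; the names here are hypothetical.
   The callback returns an rtx for the MODE-sized chunk of constant
   data at OFFSET, which c_readstr can build from a byte string:

     static rtx
     sample_read_str (void *data, HOST_WIDE_INT offset,
                      enum machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

     ...
     if (can_store_by_pieces (len, sample_read_str, (void *) str,
                              align, false))
       store_by_pieces (dest_mem, len, sample_read_str, (void *) str,
                        align, false, 0);  */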
2345 /* Generate several move instructions to store LEN bytes generated by
2346 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2347 pointer which will be passed as argument in every CONSTFUN call.
2348 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2349 a memset operation and false if it's a copy of a constant string.
2350 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2351 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2352 stpcpy.  */
2355 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2356 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2357 void *constfundata, unsigned int align, bool memsetp, int endp)
2359 struct store_by_pieces data;
2363 gcc_assert (endp != 2);
2368 ? SET_BY_PIECES_P (len, align)
2369 : STORE_BY_PIECES_P (len, align));
2370 data.constfun = constfun;
2371 data.constfundata = constfundata;
2374 store_by_pieces_1 (&data, align);
2379 gcc_assert (!data.reverse);
2384 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2385 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2387 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2390 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2397 to1 = adjust_address (data.to, QImode, data.offset);
2405 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2406 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2409 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2411 struct store_by_pieces data;
2416 data.constfun = clear_by_pieces_1;
2417 data.constfundata = NULL;
2420 store_by_pieces_1 (&data, align);
2423 /* Callback routine for clear_by_pieces.
2424 Return const0_rtx unconditionally. */
2427 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2428 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2429 enum machine_mode mode ATTRIBUTE_UNUSED)
2434 /* Subroutine of clear_by_pieces and store_by_pieces.
2435 Generate several move instructions to store LEN bytes of block TO. (A MEM
2436 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2439 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2440 unsigned int align ATTRIBUTE_UNUSED)
2442 rtx to_addr = XEXP (data->to, 0);
2443 unsigned int max_size = STORE_MAX_PIECES + 1;
2444 enum machine_mode mode = VOIDmode, tmode;
2445 enum insn_code icode;
2448 data->to_addr = to_addr;
2450 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2451 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2453 data->explicit_inc_to = 0;
2455 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2457 data->offset = data->len;
2459 /* If storing requires more than two move insns,
2460 copy addresses to registers (to make displacements shorter)
2461 and use post-increment if available. */
2462 if (!data->autinc_to
2463 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2465 /* Determine the main mode we'll be using. */
2466 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2467 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2468 if (GET_MODE_SIZE (tmode) < max_size)
2471 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2473 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2474 data->autinc_to = 1;
2475 data->explicit_inc_to = -1;
2478 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2479 && ! data->autinc_to)
2481 data->to_addr = copy_addr_to_reg (to_addr);
2482 data->autinc_to = 1;
2483 data->explicit_inc_to = 1;
2486 if (!data->autinc_to && CONSTANT_P (to_addr))
2487 data->to_addr = copy_addr_to_reg (to_addr);
2490 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2491 if (align >= GET_MODE_ALIGNMENT (tmode))
2492 align = GET_MODE_ALIGNMENT (tmode);
2495 enum machine_mode xmode;
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2499 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2500 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2501 || SLOW_UNALIGNED_ACCESS (tmode, align))
2504 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2507 /* First store what we can in the largest integer mode, then go to
2508 successively smaller modes. */
2510 while (max_size > 1)
2512 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2513 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2514 if (GET_MODE_SIZE (tmode) < max_size)
2517 if (mode == VOIDmode)
2520 icode = optab_handler (mov_optab, mode)->insn_code;
2521 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2522 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2524 max_size = GET_MODE_SIZE (mode);
2527 /* The code above should have handled everything. */
2528 gcc_assert (!data->len);
2531 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2532 with move instructions for mode MODE. GENFUN is the gen_... function
2533 to make a move insn for that mode. DATA has all the other info. */
2536 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2537 struct store_by_pieces *data)
2539 unsigned int size = GET_MODE_SIZE (mode);
2542 while (data->len >= size)
2545 data->offset -= size;
2547 if (data->autinc_to)
2548 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2551 to1 = adjust_address (data->to, mode, data->offset);
2553 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2554 emit_insn (gen_add2_insn (data->to_addr,
2555 GEN_INT (-(HOST_WIDE_INT) size)));
2557 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2558 emit_insn ((*genfun) (to1, cst));
2560 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2561 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2563 if (! data->reverse)
2564 data->offset += size;
2570 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2571 its length in bytes. */
2574 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2575 unsigned int expected_align, HOST_WIDE_INT expected_size)
2577 enum machine_mode mode = GET_MODE (object);
2580 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2582 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2583 just move a zero. Otherwise, do this a piece at a time. */
2585 && GET_CODE (size) == CONST_INT
2586 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2588 rtx zero = CONST0_RTX (mode);
2591 emit_move_insn (object, zero);
2595 if (COMPLEX_MODE_P (mode))
2597 zero = CONST0_RTX (GET_MODE_INNER (mode));
2600 write_complex_part (object, zero, 0);
2601 write_complex_part (object, zero, 1);
2607 if (size == const0_rtx)
2610 align = MEM_ALIGN (object);
2612 if (GET_CODE (size) == CONST_INT
2613 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2614 clear_by_pieces (object, INTVAL (size), align);
2615 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2616 expected_align, expected_size))
2619 return set_storage_via_libcall (object, size, const0_rtx,
2620 method == BLOCK_OP_TAILCALL);
2626 clear_storage (rtx object, rtx size, enum block_op_methods method)
2628 return clear_storage_hints (object, size, method, 0, -1);
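/* Illustrative sketch (sizes hypothetical): zeroing a 64-byte BLKmode
   stack slot.  A small constant size like this would normally be
   handled by clear_by_pieces; larger or variable sizes fall back to a
   setmem pattern or a memset libcall.

     rtx slot = assign_stack_temp (BLKmode, 64, 0);
     clear_storage (slot, GEN_INT (64), BLOCK_OP_NORMAL);  */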
2632 /* A subroutine of clear_storage. Expand a call to memset.
2633 Return the return value of memset, 0 otherwise. */
2636 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2638 tree call_expr, fn, object_tree, size_tree, val_tree;
2639 enum machine_mode size_mode;
2642 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2643 place those pseudos into a VAR_DECL and use them later.  */
2645 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2647 size_mode = TYPE_MODE (sizetype);
2648 size = convert_to_mode (size_mode, size, 1);
2649 size = copy_to_mode_reg (size_mode, size);
2651 /* It is incorrect to use the libcall calling conventions to call
2652 memset in this context. This could be a user call to memset and
2653 the user may wish to examine the return value from memset. For
2654 targets where libcalls and normal calls have different conventions
2655 for returning pointers, we could end up generating incorrect code. */
2657 object_tree = make_tree (ptr_type_node, object);
2658 if (GET_CODE (val) != CONST_INT)
2659 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2660 size_tree = make_tree (sizetype, size);
2661 val_tree = make_tree (integer_type_node, val);
2663 fn = clear_storage_libcall_fn (true);
2664 call_expr = build_call_expr (fn, 3,
2665 object_tree, val_tree, size_tree);
2666 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2668 retval = expand_normal (call_expr);
2673 /* A subroutine of set_storage_via_libcall. Create the tree node
2674 for the function we use for block clears. The first time FOR_CALL
2675 is true, we call assemble_external. */
2677 static GTY(()) tree block_clear_fn;
2680 init_block_clear_fn (const char *asmspec)
2682 if (!block_clear_fn)
2686 fn = get_identifier ("memset");
2687 args = build_function_type_list (ptr_type_node, ptr_type_node,
2688 integer_type_node, sizetype,
2689 NULL_TREE);
2691 fn = build_decl (FUNCTION_DECL, fn, args);
2692 DECL_EXTERNAL (fn) = 1;
2693 TREE_PUBLIC (fn) = 1;
2694 DECL_ARTIFICIAL (fn) = 1;
2695 TREE_NOTHROW (fn) = 1;
2696 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2697 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2699 block_clear_fn = fn;
2703 set_user_assembler_name (block_clear_fn, asmspec);
2707 clear_storage_libcall_fn (int for_call)
2709 static bool emitted_extern;
2711 if (!block_clear_fn)
2712 init_block_clear_fn (NULL);
2714 if (for_call && !emitted_extern)
2716 emitted_extern = true;
2717 make_decl_rtl (block_clear_fn);
2718 assemble_external (block_clear_fn);
2721 return block_clear_fn;
2724 /* Expand a setmem pattern; return true if successful. */
2727 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2728 unsigned int expected_align, HOST_WIDE_INT expected_size)
2730 /* Try the most limited insn first, because there's no point
2731 including more than one in the machine description unless
2732 the more limited one has some advantage. */
2734 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2735 enum machine_mode mode;
2737 if (expected_align < align)
2738 expected_align = align;
2740 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2741 mode = GET_MODE_WIDER_MODE (mode))
2743 enum insn_code code = setmem_optab[(int) mode];
2744 insn_operand_predicate_fn pred;
2746 if (code != CODE_FOR_nothing
2747 /* We don't need MODE to be narrower than
2748 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2749 the mode mask, as it is returned by the macro, it will
2750 definitely be less than the actual mode mask. */
2751 && ((GET_CODE (size) == CONST_INT
2752 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2753 <= (GET_MODE_MASK (mode) >> 1)))
2754 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2755 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2756 || (*pred) (object, BLKmode))
2757 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2758 || (*pred) (opalign, VOIDmode)))
2761 enum machine_mode char_mode;
2762 rtx last = get_last_insn ();
2765 opsize = convert_to_mode (mode, size, 1);
2766 pred = insn_data[(int) code].operand[1].predicate;
2767 if (pred != 0 && ! (*pred) (opsize, mode))
2768 opsize = copy_to_mode_reg (mode, opsize);
2771 char_mode = insn_data[(int) code].operand[2].mode;
2772 if (char_mode != VOIDmode)
2774 opchar = convert_to_mode (char_mode, opchar, 1);
2775 pred = insn_data[(int) code].operand[2].predicate;
2776 if (pred != 0 && ! (*pred) (opchar, char_mode))
2777 opchar = copy_to_mode_reg (char_mode, opchar);
2780 if (insn_data[(int) code].n_operands == 4)
2781 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2783 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2784 GEN_INT (expected_align
2786 GEN_INT (expected_size));
2793 delete_insns_since (last);
2801 /* Write to one of the components of the complex value CPLX. Write VAL to
2802 the real part if IMAG_P is false, and the imaginary part if it's true.  */
2805 write_complex_part (rtx cplx, rtx val, bool imag_p)
2807 enum machine_mode cmode;
2808 enum machine_mode imode;
2811 if (GET_CODE (cplx) == CONCAT)
2813 emit_move_insn (XEXP (cplx, imag_p), val);
2817 cmode = GET_MODE (cplx);
2818 imode = GET_MODE_INNER (cmode);
2819 ibitsize = GET_MODE_BITSIZE (imode);
2821 /* For MEMs simplify_gen_subreg may generate an invalid new address
2822 because, e.g., the original address is considered mode-dependent
2823 by the target, which restricts simplify_subreg from invoking
2824 adjust_address_nv. Instead of preparing fallback support for an
2825 invalid address, we call adjust_address_nv directly. */
2828 emit_move_insn (adjust_address_nv (cplx, imode,
2829 imag_p ? GET_MODE_SIZE (imode) : 0),
2834 /* If the sub-object is at least word sized, then we know that subregging
2835 will work. This special case is important, since store_bit_field
2836 wants to operate on integer modes, and there's rarely an OImode to
2837 correspond to TCmode. */
2838 if (ibitsize >= BITS_PER_WORD
2839 /* For hard regs we have exact predicates. Assume we can split
2840 the original object if it spans an even number of hard regs.
2841 This special case is important for SCmode on 64-bit platforms
2842 where the natural size of floating-point regs is 32-bit. */
2844 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2845 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2847 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2848 imag_p ? GET_MODE_SIZE (imode) : 0);
2851 emit_move_insn (part, val);
2855 /* simplify_gen_subreg may fail for sub-word MEMs. */
2856 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2859 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2862 /* Extract one of the components of the complex value CPLX. Extract the
2863 real part if IMAG_P is false, and the imaginary part if it's true. */
2866 read_complex_part (rtx cplx, bool imag_p)
2868 enum machine_mode cmode, imode;
2871 if (GET_CODE (cplx) == CONCAT)
2872 return XEXP (cplx, imag_p);
2874 cmode = GET_MODE (cplx);
2875 imode = GET_MODE_INNER (cmode);
2876 ibitsize = GET_MODE_BITSIZE (imode);
2878 /* Special case reads from complex constants that got spilled to memory. */
2879 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2881 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2882 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2884 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2885 if (CONSTANT_CLASS_P (part))
2886 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2890 /* For MEMs simplify_gen_subreg may generate an invalid new address
2891 because, e.g., the original address is considered mode-dependent
2892 by the target, which restricts simplify_subreg from invoking
2893 adjust_address_nv. Instead of preparing fallback support for an
2894 invalid address, we call adjust_address_nv directly. */
2896 return adjust_address_nv (cplx, imode,
2897 imag_p ? GET_MODE_SIZE (imode) : 0);
2899 /* If the sub-object is at least word sized, then we know that subregging
2900 will work. This special case is important, since extract_bit_field
2901 wants to operate on integer modes, and there's rarely an OImode to
2902 correspond to TCmode. */
2903 if (ibitsize >= BITS_PER_WORD
2904 /* For hard regs we have exact predicates. Assume we can split
2905 the original object if it spans an even number of hard regs.
2906 This special case is important for SCmode on 64-bit platforms
2907 where the natural size of floating-point regs is 32-bit. */
2909 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2910 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2912 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2913 imag_p ? GET_MODE_SIZE (imode) : 0);
2917 /* simplify_gen_subreg may fail for sub-word MEMs. */
2918 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2921 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2922 true, NULL_RTX, imode, imode);
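/* Illustrative sketch (SCmode operands assumed, names hypothetical):
   conjugating a complex value by copying the real part and negating
   the imaginary part with the two routines above:

     write_complex_part (target, read_complex_part (cplx, false), false);
     write_complex_part (target,
                         expand_unop (SFmode, neg_optab,
                                      read_complex_part (cplx, true),
                                      NULL_RTX, 0),
                         true);  */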
2925 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2926 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2927 represented in NEW_MODE. If FORCE is true, this will never happen, as
2928 we'll force-create a SUBREG if needed. */
2931 emit_move_change_mode (enum machine_mode new_mode,
2932 enum machine_mode old_mode, rtx x, bool force)
2936 if (push_operand (x, GET_MODE (x)))
2938 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2939 MEM_COPY_ATTRIBUTES (ret, x);
2943 /* We don't have to worry about changing the address since the
2944 size in bytes is supposed to be the same. */
2945 if (reload_in_progress)
2947 /* Copy the MEM to change the mode and move any
2948 substitutions from the old MEM to the new one. */
2949 ret = adjust_address_nv (x, new_mode, 0);
2950 copy_replacements (x, ret);
2953 ret = adjust_address (x, new_mode, 0);
2957 /* Note that we do want simplify_subreg's behavior of validating
2958 that the new mode is ok for a hard register. If we were to use
2959 simplify_gen_subreg, we would create the subreg, but would
2960 probably run into the target not being able to implement it. */
2961 /* Except, of course, when FORCE is true, in which case this is exactly
2962 what we want; that is needed for CCmodes on some targets.  */
2964 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2966 ret = simplify_subreg (new_mode, x, old_mode, 0);
2972 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2973 an integer mode of the same size as MODE. Returns the instruction
2974 emitted, or NULL if such a move could not be generated. */
2977 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2979 enum machine_mode imode;
2980 enum insn_code code;
2982 /* There must exist a mode of the exact size we require. */
2983 imode = int_mode_for_mode (mode);
2984 if (imode == BLKmode)
2987 /* The target must support moves in this mode. */
2988 code = optab_handler (mov_optab, imode)->insn_code;
2989 if (code == CODE_FOR_nothing)
2992 x = emit_move_change_mode (imode, mode, x, force);
2995 y = emit_move_change_mode (imode, mode, y, force);
2998 return emit_insn (GEN_FCN (code) (x, y));
3001 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3002 Return an equivalent MEM that does not use an auto-increment. */
3005 emit_move_resolve_push (enum machine_mode mode, rtx x)
3007 enum rtx_code code = GET_CODE (XEXP (x, 0));
3008 HOST_WIDE_INT adjust;
3011 adjust = GET_MODE_SIZE (mode);
3012 #ifdef PUSH_ROUNDING
3013 adjust = PUSH_ROUNDING (adjust);
3015 if (code == PRE_DEC || code == POST_DEC)
3017 else if (code == PRE_MODIFY || code == POST_MODIFY)
3019 rtx expr = XEXP (XEXP (x, 0), 1);
3022 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3023 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3024 val = INTVAL (XEXP (expr, 1));
3025 if (GET_CODE (expr) == MINUS)
3027 gcc_assert (adjust == val || adjust == -val);
3031 /* Do not use anti_adjust_stack, since we don't want to update
3032 stack_pointer_delta. */
3033 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3034 GEN_INT (adjust), stack_pointer_rtx,
3035 0, OPTAB_LIB_WIDEN);
3036 if (temp != stack_pointer_rtx)
3037 emit_move_insn (stack_pointer_rtx, temp);
3044 temp = stack_pointer_rtx;
3049 temp = plus_constant (stack_pointer_rtx, -adjust);
3055 return replace_equiv_address (x, temp);
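/* Illustrative sketch: on a hypothetical 32-bit STACK_GROWS_DOWNWARD
   target without special PUSH_ROUNDING, resolving

     (set (mem:SI (pre_dec:SI (reg:SI sp))) ...)

   first emits an explicit sp = sp - 4 adjustment and then returns

     (mem:SI (reg:SI sp))

   so the caller can store through a plain, side-effect-free address.  */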
3058 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3059 X is known to satisfy push_operand, and MODE is known to be complex.
3060 Returns the last instruction emitted. */
3063 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3065 enum machine_mode submode = GET_MODE_INNER (mode);
3068 #ifdef PUSH_ROUNDING
3069 unsigned int submodesize = GET_MODE_SIZE (submode);
3071 /* In case we output to the stack, but the size is smaller than the
3072 machine can push exactly, we need to use move instructions. */
3073 if (PUSH_ROUNDING (submodesize) != submodesize)
3075 x = emit_move_resolve_push (mode, x);
3076 return emit_move_insn (x, y);
3080 /* Note that the real part always precedes the imag part in memory
3081 regardless of the machine's endianness.  */
3082 switch (GET_CODE (XEXP (x, 0)))
3096 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3097 read_complex_part (y, imag_first));
3098 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3099 read_complex_part (y, !imag_first));
3102 /* A subroutine of emit_move_complex. Perform the move from Y to X
3103 via two moves of the parts. Returns the last instruction emitted. */
3106 emit_move_complex_parts (rtx x, rtx y)
3108 /* Show the output dies here. This is necessary for SUBREGs
3109 of pseudos since we cannot track their lifetimes correctly;
3110 hard regs shouldn't appear here except as return values. */
3111 if (!reload_completed && !reload_in_progress
3112 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3115 write_complex_part (x, read_complex_part (y, false), false);
3116 write_complex_part (x, read_complex_part (y, true), true);
3118 return get_last_insn ();
3121 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3122 MODE is known to be complex. Returns the last instruction emitted. */
3125 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3129 /* Need to take special care for pushes, to maintain proper ordering
3130 of the data, and possibly extra padding. */
3131 if (push_operand (x, mode))
3132 return emit_move_complex_push (mode, x, y);
3134 /* See if we can coerce the target into moving both values at once. */
3136 /* Move floating point as parts. */
3137 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3138 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3140 /* Not possible if the values are inherently not adjacent. */
3141 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3143 /* Is possible if both are registers (or subregs of registers). */
3144 else if (register_operand (x, mode) && register_operand (y, mode))
3146 /* If one of the operands is a memory, and alignment constraints
3147 are friendly enough, we may be able to do combined memory operations.
3148 We do not attempt this if Y is a constant because that combination is
3149 usually better with the by-parts thing below. */
3150 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3151 && (!STRICT_ALIGNMENT
3152 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3161 /* For memory to memory moves, optimal behavior can be had with the
3162 existing block move logic. */
3163 if (MEM_P (x) && MEM_P (y))
3165 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3166 BLOCK_OP_NO_LIBCALL);
3167 return get_last_insn ();
3170 ret = emit_move_via_integer (mode, x, y, true);
3175 return emit_move_complex_parts (x, y);
3178 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3179 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3182 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3186 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3189 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3190 if (code != CODE_FOR_nothing)
3192 x = emit_move_change_mode (CCmode, mode, x, true);
3193 y = emit_move_change_mode (CCmode, mode, y, true);
3194 return emit_insn (GEN_FCN (code) (x, y));
3198 /* Otherwise, find the MODE_INT mode of the same width. */
3199 ret = emit_move_via_integer (mode, x, y, false);
3200 gcc_assert (ret != NULL);
3204 /* Return true if word I of OP lies entirely in the
3205 undefined bits of a paradoxical subreg. */
3208 undefined_operand_subword_p (const_rtx op, int i)
3210 enum machine_mode innermode, innermostmode;
3212 if (GET_CODE (op) != SUBREG)
3214 innermode = GET_MODE (op);
3215 innermostmode = GET_MODE (SUBREG_REG (op));
3216 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3217 /* The SUBREG_BYTE represents offset, as if the value were stored in
3218 memory, except for a paradoxical subreg where we define
3219 SUBREG_BYTE to be 0; undo this exception as in
3220 simplify_subreg.  */
3221 if (SUBREG_BYTE (op) == 0
3222 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3224 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3225 if (WORDS_BIG_ENDIAN)
3226 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3227 if (BYTES_BIG_ENDIAN)
3228 offset += difference % UNITS_PER_WORD;
3230 if (offset >= GET_MODE_SIZE (innermostmode)
3231 || offset <= -GET_MODE_SIZE (word_mode))
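/* Example: on a 32-bit target, (subreg:DI (reg:SI 100) 0) is a
   paradoxical subreg (the pseudo number is hypothetical).  Word 0
   comes from the SImode register, while word 1 lies entirely in the
   undefined bits, so this function returns true for I == 1 and
   emit_move_multi_word can skip the move for that word.  */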
3236 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3237 MODE is any multi-word or full-word mode that lacks a move_insn
3238 pattern. Note that you will get better code if you define such
3239 patterns, even if they must turn into multiple assembler instructions. */
3242 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3249 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3251 /* If X is a push on the stack, do the push now and replace
3252 X with a reference to the stack pointer. */
3253 if (push_operand (x, mode))
3254 x = emit_move_resolve_push (mode, x);
3256 /* If we are in reload, see if either operand is a MEM whose address
3257 is scheduled for replacement. */
3258 if (reload_in_progress && MEM_P (x)
3259 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3260 x = replace_equiv_address_nv (x, inner);
3261 if (reload_in_progress && MEM_P (y)
3262 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3263 y = replace_equiv_address_nv (y, inner);
3267 need_clobber = false;
3269 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3272 rtx xpart = operand_subword (x, i, 1, mode);
3275 /* Do not generate code for a move if it would come entirely
3276 from the undefined bits of a paradoxical subreg. */
3277 if (undefined_operand_subword_p (y, i))
3280 ypart = operand_subword (y, i, 1, mode);
3282 /* If we can't get a part of Y, put Y into memory if it is a
3283 constant. Otherwise, force it into a register. Then we must
3284 be able to get a part of Y. */
3285 if (ypart == 0 && CONSTANT_P (y))
3287 y = use_anchored_address (force_const_mem (mode, y));
3288 ypart = operand_subword (y, i, 1, mode);
3290 else if (ypart == 0)
3291 ypart = operand_subword_force (y, i, mode);
3293 gcc_assert (xpart && ypart);
3295 need_clobber |= (GET_CODE (xpart) == SUBREG);
3297 last_insn = emit_move_insn (xpart, ypart);
3303 /* Show the output dies here. This is necessary for SUBREGs
3304 of pseudos since we cannot track their lifetimes correctly;
3305 hard regs shouldn't appear here except as return values.
3306 We never want to emit such a clobber after reload. */
3308 && ! (reload_in_progress || reload_completed)
3309 && need_clobber != 0)
3317 /* Low level part of emit_move_insn.
3318 Called just like emit_move_insn, but assumes X and Y
3319 are basically valid. */
3322 emit_move_insn_1 (rtx x, rtx y)
3324 enum machine_mode mode = GET_MODE (x);
3325 enum insn_code code;
3327 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3329 code = optab_handler (mov_optab, mode)->insn_code;
3330 if (code != CODE_FOR_nothing)
3331 return emit_insn (GEN_FCN (code) (x, y));
3333 /* Expand complex moves by moving real part and imag part. */
3334 if (COMPLEX_MODE_P (mode))
3335 return emit_move_complex (mode, x, y);
3337 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3338 || ALL_FIXED_POINT_MODE_P (mode))
3340 rtx result = emit_move_via_integer (mode, x, y, true);
3342 /* If we can't find an integer mode, use a multi-word move.  */
3346 return emit_move_multi_word (mode, x, y);
3349 if (GET_MODE_CLASS (mode) == MODE_CC)
3350 return emit_move_ccmode (mode, x, y);
3352 /* Try using a move pattern for the corresponding integer mode. This is
3353 only safe when simplify_subreg can convert MODE constants into integer
3354 constants. At present, it can only do this reliably if the value
3355 fits within a HOST_WIDE_INT. */
3356 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3358 rtx ret = emit_move_via_integer (mode, x, y, false);
3363 return emit_move_multi_word (mode, x, y);
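/* Illustrative sketch: on a 32-bit target whose mov patterns stop at
   SImode, a DImode register-to-register copy falls through to
   emit_move_multi_word and decomposes into two word moves, roughly

     (set (subreg:SI (reg:DI 101) 0) (subreg:SI (reg:DI 100) 0))
     (set (subreg:SI (reg:DI 101) 4) (subreg:SI (reg:DI 100) 4))

   (pseudo register numbers hypothetical).  */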
3366 /* Generate code to copy Y into X.
3367 Both Y and X must have the same mode, except that
3368 Y can be a constant with VOIDmode.
3369 This mode cannot be BLKmode; use emit_block_move for that.
3371 Return the last instruction emitted. */
3374 emit_move_insn (rtx x, rtx y)
3376 enum machine_mode mode = GET_MODE (x);
3377 rtx y_cst = NULL_RTX;
3380 gcc_assert (mode != BLKmode
3381 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3386 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3387 && (last_insn = compress_float_constant (x, y)))
3392 if (!LEGITIMATE_CONSTANT_P (y))
3394 y = force_const_mem (mode, y);
3396 /* If the target's cannot_force_const_mem prevented the spill,
3397 assume that the target's move expanders will also take care
3398 of the non-legitimate constant. */
3402 y = use_anchored_address (y);
3406 /* If X or Y are memory references, verify that their addresses are valid
3409 && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
3410 && ! push_operand (x, GET_MODE (x))))
3411 x = validize_mem (x);
3414 && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3415 y = validize_mem (y);
3417 gcc_assert (mode != BLKmode);
3419 last_insn = emit_move_insn_1 (x, y);
3421 if (y_cst && REG_P (x)
3422 && (set = single_set (last_insn)) != NULL_RTX
3423 && SET_DEST (set) == x
3424 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3425 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3430 /* If Y is representable exactly in a narrower mode, and the target can
3431 perform the extension directly from constant or memory, then emit the
3432 move as an extension. */
3435 compress_float_constant (rtx x, rtx y)
3437 enum machine_mode dstmode = GET_MODE (x);
3438 enum machine_mode orig_srcmode = GET_MODE (y);
3439 enum machine_mode srcmode;
3441 int oldcost, newcost;
3443 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3445 if (LEGITIMATE_CONSTANT_P (y))
3446 oldcost = rtx_cost (y, SET);
3448 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3450 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3451 srcmode != orig_srcmode;
3452 srcmode = GET_MODE_WIDER_MODE (srcmode))
3455 rtx trunc_y, last_insn;
3457 /* Skip if the target can't extend this way. */
3458 ic = can_extend_p (dstmode, srcmode, 0);
3459 if (ic == CODE_FOR_nothing)
3462 /* Skip if the narrowed value isn't exact. */
3463 if (! exact_real_truncate (srcmode, &r))
3466 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3468 if (LEGITIMATE_CONSTANT_P (trunc_y))
3470 /* Skip if the target needs extra instructions to perform
3471 the extension.  */
3472 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3474 /* This is valid, but may not be cheaper than the original. */
3475 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3476 if (oldcost < newcost)
3479 else if (float_extend_from_mem[dstmode][srcmode])
3481 trunc_y = force_const_mem (srcmode, trunc_y);
3482 /* This is valid, but may not be cheaper than the original. */
3483 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3484 if (oldcost < newcost)
3486 trunc_y = validize_mem (trunc_y);
3491 /* For CSE's benefit, force the compressed constant pool entry
3492 into a new pseudo. This constant may be used in different modes,
3493 and if not, combine will put things back together for us. */
3494 trunc_y = force_reg (srcmode, trunc_y);
3495 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3496 last_insn = get_last_insn ();
3499 set_unique_reg_note (last_insn, REG_EQUAL, y);
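/* Worked example (hypothetical target): moving the DFmode constant
   1.0 into a register when 1.0 truncates exactly to SFmode and the
   target can extend SFmode to DFmode.  Instead of loading the 8-byte
   constant from memory, we emit roughly

     (set (reg:DF 100) (float_extend:DF (mem/u:SF (symbol_ref ...))))

   and the REG_EQUAL note still records the original DFmode value so
   CSE can see it.  */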
3507 /* Pushing data onto the stack. */
3509 /* Push a block of length SIZE (perhaps variable)
3510 and return an rtx to address the beginning of the block.
3511 The value may be virtual_outgoing_args_rtx.
3513 EXTRA is the number of bytes of padding to push in addition to SIZE.
3514 BELOW nonzero means this padding comes at low addresses;
3515 otherwise, the padding comes at high addresses. */
3518 push_block (rtx size, int extra, int below)
3522 size = convert_modes (Pmode, ptr_mode, size, 1);
3523 if (CONSTANT_P (size))
3524 anti_adjust_stack (plus_constant (size, extra));
3525 else if (REG_P (size) && extra == 0)
3526 anti_adjust_stack (size);
3529 temp = copy_to_mode_reg (Pmode, size);
3531 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3532 temp, 0, OPTAB_LIB_WIDEN);
3533 anti_adjust_stack (temp);
3536 #ifndef STACK_GROWS_DOWNWARD
3542 temp = virtual_outgoing_args_rtx;
3543 if (extra != 0 && below)
3544 temp = plus_constant (temp, extra);
3548 if (GET_CODE (size) == CONST_INT)
3549 temp = plus_constant (virtual_outgoing_args_rtx,
3550 -INTVAL (size) - (below ? 0 : extra));
3551 else if (extra != 0 && !below)
3552 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3553 negate_rtx (Pmode, plus_constant (size, extra)));
3555 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3556 negate_rtx (Pmode, size));
3559 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
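/* Illustrative sketch (sizes hypothetical): reserving a 32-byte block
   with 4 bytes of padding below it and storing through the returned
   address:

     rtx addr = push_block (GEN_INT (32), 4, 1);
     rtx dst = gen_rtx_MEM (BLKmode, addr);  */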
3562 #ifdef PUSH_ROUNDING
3564 /* Emit single push insn. */
3567 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3570 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3572 enum insn_code icode;
3573 insn_operand_predicate_fn pred;
3575 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3576 /* If there is a push pattern, use it.  Otherwise try the old way of
3577 throwing a MEM representing the push operation at the move expander.  */
3578 icode = optab_handler (push_optab, mode)->insn_code;
3579 if (icode != CODE_FOR_nothing)
3581 if (((pred = insn_data[(int) icode].operand[0].predicate)
3582 && !((*pred) (x, mode))))
3583 x = force_reg (mode, x);
3584 emit_insn (GEN_FCN (icode) (x));
3587 if (GET_MODE_SIZE (mode) == rounded_size)
3588 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3589 /* If we are to pad downward, adjust the stack pointer first and
3590 then store X into the stack location using an offset. This is
3591 because emit_move_insn does not know how to pad; it does not have
3592 access to type.  */
3593 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3595 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3596 HOST_WIDE_INT offset;
3598 emit_move_insn (stack_pointer_rtx,
3599 expand_binop (Pmode,
3600 #ifdef STACK_GROWS_DOWNWARD
3606 GEN_INT (rounded_size),
3607 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3609 offset = (HOST_WIDE_INT) padding_size;
3610 #ifdef STACK_GROWS_DOWNWARD
3611 if (STACK_PUSH_CODE == POST_DEC)
3612 /* We have already decremented the stack pointer, so get the
3613 previous value.  */
3614 offset += (HOST_WIDE_INT) rounded_size;
3616 if (STACK_PUSH_CODE == POST_INC)
3617 /* We have already incremented the stack pointer, so get the
3618 previous value.  */
3619 offset -= (HOST_WIDE_INT) rounded_size;
3621 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3625 #ifdef STACK_GROWS_DOWNWARD
3626 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3627 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3628 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3630 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3631 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3632 GEN_INT (rounded_size));
3634 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3637 dest = gen_rtx_MEM (mode, dest_addr);
3641 set_mem_attributes (dest, type, 1);
3643 if (flag_optimize_sibling_calls)
3644 /* Function incoming arguments may overlap with sibling call
3645 outgoing arguments and we cannot allow reordering of reads
3646 from function arguments with stores to outgoing arguments
3647 of sibling calls. */
3648 set_mem_alias_set (dest, 0);
3650 emit_move_insn (dest, x);
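/* Worked example: with a hypothetical PUSH_ROUNDING that rounds up to
   a multiple of 4, pushing an HImode value gives rounded_size == 4 and
   padding_size == 2.  If FUNCTION_ARG_PADDING says downward on a
   STACK_GROWS_DOWNWARD target, we first lower the stack pointer by 4
   and then store the two meaningful bytes at offset 2 from the new
   stack pointer (offset 6 for POST_DEC), rather than letting a bare
   push place them at the top of the slot.  */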
3654 /* Generate code to push X onto the stack, assuming it has mode MODE and
3656 MODE is redundant except when X is a CONST_INT (since they don't
3657 carry mode info).
3658 SIZE is an rtx for the size of data to be copied (in bytes),
3659 needed only if X is BLKmode.
3661 ALIGN (in bits) is maximum alignment we can assume.
3663 If PARTIAL and REG are both nonzero, then copy that many of the first
3664 bytes of X into registers starting with REG, and push the rest of X.
3665 The amount of space pushed is decreased by PARTIAL bytes.
3666 REG must be a hard register in this case.
3667 If REG is zero but PARTIAL is not, take all other actions for an
3668 argument partially in registers, but do not actually load any
3669 registers.
3671 EXTRA is the amount in bytes of extra space to leave next to this arg.
3672 This is ignored if an argument block has already been allocated.
3674 On a machine that lacks real push insns, ARGS_ADDR is the address of
3675 the bottom of the argument block for this call. We use indexing off there
3676 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3677 argument block has not been preallocated.
3679 ARGS_SO_FAR is the size of args previously pushed for this call.
3681 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3682 for arguments passed in registers. If nonzero, it will be the number
3683 of bytes required. */
3686 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3687 unsigned int align, int partial, rtx reg, int extra,
3688 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3692 enum direction stack_direction
3693 #ifdef STACK_GROWS_DOWNWARD
3699 /* Decide where to pad the argument: `downward' for below,
3700 `upward' for above, or `none' for don't pad it.
3701 Default is below for small data on big-endian machines; else above. */
3702 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3704 /* Invert direction if stack is post-decrement.
3706 if (STACK_PUSH_CODE == POST_DEC)
3707 if (where_pad != none)
3708 where_pad = (where_pad == downward ? upward : downward);
3713 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3715 /* Copy a block into the stack, entirely or partially. */
3722 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3723 used = partial - offset;
3725 if (mode != BLKmode)
3727 /* A value is to be stored in an insufficiently aligned
3728 stack slot; copy via a suitably aligned slot if
3729 necessary.  */
3730 size = GEN_INT (GET_MODE_SIZE (mode));
3731 if (!MEM_P (xinner))
3733 temp = assign_temp (type, 0, 1, 1);
3734 emit_move_insn (temp, xinner);
3741 /* USED is now the # of bytes we need not copy to the stack
3742 because registers will take care of them. */
3745 xinner = adjust_address (xinner, BLKmode, used);
3747 /* If the partial register-part of the arg counts in its stack size,
3748 skip the part of stack space corresponding to the registers.
3749 Otherwise, start copying to the beginning of the stack space,
3750 by setting SKIP to 0. */
3751 skip = (reg_parm_stack_space == 0) ? 0 : used;
3753 #ifdef PUSH_ROUNDING
3754 /* Do it with several push insns if that doesn't take lots of insns
3755 and if there is no difficulty with push insns that skip bytes
3756 on the stack for alignment purposes. */
3759 && GET_CODE (size) == CONST_INT
3761 && MEM_ALIGN (xinner) >= align
3762 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3763 /* Here we avoid the case of a structure whose weak alignment
3764 forces many pushes of a small amount of data,
3765 and such small pushes do rounding that causes trouble. */
3766 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3767 || align >= BIGGEST_ALIGNMENT
3768 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3769 == (align / BITS_PER_UNIT)))
3770 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3772 /* Push padding now if padding above and stack grows down,
3773 or if padding below and stack grows up.
3774 But if space already allocated, this has already been done. */
3775 if (extra && args_addr == 0
3776 && where_pad != none && where_pad != stack_direction)
3777 anti_adjust_stack (GEN_INT (extra));
3779 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3782 #endif /* PUSH_ROUNDING */
3786 /* Otherwise make space on the stack and copy the data
3787 to the address of that space. */
3789 /* Deduct words put into registers from the size we must copy. */
3792 if (GET_CODE (size) == CONST_INT)
3793 size = GEN_INT (INTVAL (size) - used);
3795 size = expand_binop (GET_MODE (size), sub_optab, size,
3796 GEN_INT (used), NULL_RTX, 0,
3800 /* Get the address of the stack space.
3801 In this case, we do not deal with EXTRA separately.
3802 A single stack adjust will do. */
3805 temp = push_block (size, extra, where_pad == downward);
3808 else if (GET_CODE (args_so_far) == CONST_INT)
3809 temp = memory_address (BLKmode,
3810 plus_constant (args_addr,
3811 skip + INTVAL (args_so_far)));
3813 temp = memory_address (BLKmode,
3814 plus_constant (gen_rtx_PLUS (Pmode,
3819 if (!ACCUMULATE_OUTGOING_ARGS)
3821 /* If the source is referenced relative to the stack pointer,
3822 copy it to another register to stabilize it. We do not need
3823 to do this if we know that we won't be changing sp. */
3825 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3826 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3827 temp = copy_to_reg (temp);
3830 target = gen_rtx_MEM (BLKmode, temp);
3832 /* We do *not* set_mem_attributes here, because incoming arguments
3833 may overlap with sibling call outgoing arguments and we cannot
3834 allow reordering of reads from function arguments with stores
3835 to outgoing arguments of sibling calls. We do, however, want
3836 to record the alignment of the stack slot. */
3837 /* ALIGN may well be better aligned than TYPE, e.g. due to
3838 PARM_BOUNDARY. Assume the caller isn't lying. */
3839 set_mem_align (target, align);
3841 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3844 else if (partial > 0)
3846 /* Scalar partly in registers. */
3848 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3851 /* # bytes of start of argument
3852 that we must make space for but need not store. */
3853 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3854 int args_offset = INTVAL (args_so_far);
3857 /* Push padding now if padding above and stack grows down,
3858 or if padding below and stack grows up.
3859 But if space already allocated, this has already been done. */
3860 if (extra && args_addr == 0
3861 && where_pad != none && where_pad != stack_direction)
3862 anti_adjust_stack (GEN_INT (extra));
3864 /* If we make space by pushing it, we might as well push
3865 the real data. Otherwise, we can leave OFFSET nonzero
3866 and leave the space uninitialized. */
3870 /* Now NOT_STACK gets the number of words that we don't need to
3871 allocate on the stack. Convert OFFSET to words too. */
3872 not_stack = (partial - offset) / UNITS_PER_WORD;
3873 offset /= UNITS_PER_WORD;
3875 /* If the partial register-part of the arg counts in its stack size,
3876 skip the part of stack space corresponding to the registers.
3877 Otherwise, start copying to the beginning of the stack space,
3878 by setting SKIP to 0. */
3879 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3881 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3882 x = validize_mem (force_const_mem (mode, x));
3884 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3885 SUBREGs of such registers are not allowed. */
3886 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3887 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3888 x = copy_to_reg (x);
3890 /* Loop over all the words allocated on the stack for this arg. */
3891 /* We can do it by words, because any scalar bigger than a word
3892 has a size a multiple of a word. */
3893 #ifndef PUSH_ARGS_REVERSED
3894 for (i = not_stack; i < size; i++)
3896 for (i = size - 1; i >= not_stack; i--)
3898 if (i >= not_stack + offset)
3899 emit_push_insn (operand_subword_force (x, i, mode),
3900 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3902 GEN_INT (args_offset + ((i - not_stack + skip)
3904 reg_parm_stack_space, alignment_pad);
3911 /* Push padding now if padding above and stack grows down,
3912 or if padding below and stack grows up.
3913 But if space already allocated, this has already been done. */
3914 if (extra && args_addr == 0
3915 && where_pad != none && where_pad != stack_direction)
3916 anti_adjust_stack (GEN_INT (extra));
3918 #ifdef PUSH_ROUNDING
3919 if (args_addr == 0 && PUSH_ARGS)
3920 emit_single_push_insn (mode, x, type);
3924 if (GET_CODE (args_so_far) == CONST_INT)
3926 = memory_address (mode,
3927 plus_constant (args_addr,
3928 INTVAL (args_so_far)));
3930 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3932 dest = gen_rtx_MEM (mode, addr);
3934 /* We do *not* set_mem_attributes here, because incoming arguments
3935 may overlap with sibling call outgoing arguments and we cannot
3936 allow reordering of reads from function arguments with stores
3937 to outgoing arguments of sibling calls. We do, however, want
3938 to record the alignment of the stack slot. */
3939 /* ALIGN may well be better aligned than TYPE, e.g. due to
3940 PARM_BOUNDARY. Assume the caller isn't lying. */
3941 set_mem_align (dest, align);
3943 emit_move_insn (dest, x);
3947 /* If part should go in registers, copy that part
3948 into the appropriate registers. Do this now, at the end,
3949 since mem-to-mem copies above may do function calls. */
3950 if (partial > 0 && reg != 0)
3952 /* Handle calls that pass values in multiple non-contiguous locations.
3953 The Irix 6 ABI has examples of this. */
3954 if (GET_CODE (reg) == PARALLEL)
3955 emit_group_load (reg, x, type, -1);
3958 gcc_assert (partial % UNITS_PER_WORD == 0);
3959 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3963 if (extra && args_addr == 0 && where_pad == stack_direction)
3964 anti_adjust_stack (GEN_INT (extra));
3966 if (alignment_pad && args_addr == 0)
3967 anti_adjust_stack (alignment_pad);
3970 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3971 operations.  */
3974 get_subtarget (rtx x)
3978 /* Only registers can be subtargets. */
3980 /* Don't use hard regs to avoid extending their life. */
3981 || REGNO (x) < FIRST_PSEUDO_REGISTER
3985 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3986 FIELD is a bitfield. Returns true if the optimization was successful,
3987 and there's nothing else to do. */
3990 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3991 unsigned HOST_WIDE_INT bitpos,
3992 enum machine_mode mode1, rtx str_rtx,
3995 enum machine_mode str_mode = GET_MODE (str_rtx);
3996 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4001 if (mode1 != VOIDmode
4002 || bitsize >= BITS_PER_WORD
4003 || str_bitsize > BITS_PER_WORD
4004 || TREE_SIDE_EFFECTS (to)
4005 || TREE_THIS_VOLATILE (to))
4009 if (!BINARY_CLASS_P (src)
4010 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4013 op0 = TREE_OPERAND (src, 0);
4014 op1 = TREE_OPERAND (src, 1);
4017 if (!operand_equal_p (to, op0, 0))
4020 if (MEM_P (str_rtx))
4022 unsigned HOST_WIDE_INT offset1;
4024 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4025 str_mode = word_mode;
4026 str_mode = get_best_mode (bitsize, bitpos,
4027 MEM_ALIGN (str_rtx), str_mode, 0);
4028 if (str_mode == VOIDmode)
4030 str_bitsize = GET_MODE_BITSIZE (str_mode);
4032 offset1 = bitpos;
4033 bitpos %= str_bitsize;
4034 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4035 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4037 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4040 /* If the bit field covers the whole REG/MEM, store_field
4041 will likely generate better code. */
4042 if (bitsize >= str_bitsize)
4045 /* We can't handle fields split across multiple entities. */
4046 if (bitpos + bitsize > str_bitsize)
4049 if (BYTES_BIG_ENDIAN)
4050 bitpos = str_bitsize - bitpos - bitsize;
4052 switch (TREE_CODE (src))
4056 /* For now, just optimize the case of the topmost bitfield
4057 where we don't need to do any masking and also
4058 1 bit bitfields where xor can be used.
4059 We might win by one instruction for the other bitfields
4060 too if insv/extv instructions aren't used, so that
4061 can be added later. */
4062 if (bitpos + bitsize != str_bitsize
4063 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4066 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4067 value = convert_modes (str_mode,
4068 TYPE_MODE (TREE_TYPE (op1)), value,
4069 TYPE_UNSIGNED (TREE_TYPE (op1)));
4071 /* We may be accessing data outside the field, which means
4072 we can alias adjacent data. */
4073 if (MEM_P (str_rtx))
4075 str_rtx = shallow_copy_rtx (str_rtx);
4076 set_mem_alias_set (str_rtx, 0);
4077 set_mem_expr (str_rtx, 0);
4080 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4081 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4083 value = expand_and (str_mode, value, const1_rtx, NULL);
4086 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4087 build_int_cst (NULL_TREE, bitpos),
4089 result = expand_binop (str_mode, binop, str_rtx,
4090 value, str_rtx, 1, OPTAB_WIDEN);
4091 if (result != str_rtx)
4092 emit_move_insn (str_rtx, result);
4097 if (TREE_CODE (op1) != INTEGER_CST)
4099 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4100 value = convert_modes (GET_MODE (str_rtx),
4101 TYPE_MODE (TREE_TYPE (op1)), value,
4102 TYPE_UNSIGNED (TREE_TYPE (op1)));
4104 /* We may be accessing data outside the field, which means
4105 we can alias adjacent data. */
4106 if (MEM_P (str_rtx))
4108 str_rtx = shallow_copy_rtx (str_rtx);
4109 set_mem_alias_set (str_rtx, 0);
4110 set_mem_expr (str_rtx, 0);
4113 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4114 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4116 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4118 value = expand_and (GET_MODE (str_rtx), value, mask,
4121 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4122 build_int_cst (NULL_TREE, bitpos),
4124 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4125 value, str_rtx, 1, OPTAB_WIDEN);
4126 if (result != str_rtx)
4127 emit_move_insn (str_rtx, result);
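/* Illustrative sketch of the BIT_IOR_EXPR case above (layout
   hypothetical): for

     struct { unsigned f : 3; } s;  ...  s.f |= 5;

   with S living in a word-sized register, the store collapses to a
   single IOR with the value shifted into place, roughly

     (set (reg:SI s) (ior:SI (reg:SI s) (const_int (5 << POS))))

   where POS is the bit position of F, instead of a full
   extract/modify/insert sequence.  */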
4138 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4139 is true, try generating a nontemporal store. */
4142 expand_assignment (tree to, tree from, bool nontemporal)
4147 /* Don't crash if the lhs of the assignment was erroneous. */
4148 if (TREE_CODE (to) == ERROR_MARK)
4150 result = expand_normal (from);
4154 /* Optimize away no-op moves without side-effects. */
4155 if (operand_equal_p (to, from, 0))
4158 /* Assignment of a structure component needs special treatment
4159 if the structure component's rtx is not simply a MEM.
4160 Assignment of an array element at a constant index, and assignment of
4161 an array element in an unaligned packed structure field, have the same problem. */
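/* For illustration (added, not in the original file): an assignment
   such as

       p->arr[3].bits = v;

   takes this path: get_inner_reference decomposes the left-hand side
   into a base object plus a bit offset, and the store is then done by
   store_field or, when profitable, by the bitfield fast path above.  */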
4163 if (handled_component_p (to)
4164 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4166 enum machine_mode mode1;
4167 HOST_WIDE_INT bitsize, bitpos;
4174 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4175 &unsignedp, &volatilep, true);
4177 /* If we are going to use store_bit_field and extract_bit_field,
4178 make sure to_rtx will be safe for multiple use. */
4180 to_rtx = expand_normal (tem);
4186 if (!MEM_P (to_rtx))
4188 /* We can get constant negative offsets into arrays with broken
4189 user code. Translate this to a trap instead of ICEing. */
4190 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4191 expand_builtin_trap ();
4192 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4195 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4196 #ifdef POINTERS_EXTEND_UNSIGNED
4197 if (GET_MODE (offset_rtx) != Pmode)
4198 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4200 if (GET_MODE (offset_rtx) != ptr_mode)
4201 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4204 /* A constant address in TO_RTX can have VOIDmode; we must not
4205 try to call force_reg in that case, so avoid it. */
4207 && GET_MODE (to_rtx) == BLKmode
4208 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4210 && (bitpos % bitsize) == 0
4211 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4212 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4214 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4218 to_rtx = offset_address (to_rtx, offset_rtx,
4219 highest_pow2_factor_for_target (to,
4223 /* Handle expand_expr of a complex value returning a CONCAT. */
4224 if (GET_CODE (to_rtx) == CONCAT)
4226 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4228 gcc_assert (bitpos == 0);
4229 result = store_expr (from, to_rtx, false, nontemporal);
4233 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4234 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4242 /* If the field is at offset zero, we could have been given the
4243 DECL_RTX of the parent struct. Don't munge it. */
4244 to_rtx = shallow_copy_rtx (to_rtx);
4246 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4248 /* Deal with volatile and readonly fields. The former is only
4249 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4251 MEM_VOLATILE_P (to_rtx) = 1;
4252 if (component_uses_parent_alias_set (to))
4253 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4256 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4260 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4261 TREE_TYPE (tem), get_alias_set (to),
4266 preserve_temp_slots (result);
4272 /* If the rhs is a function call and its value is not an aggregate,
4273 call the function before we start to compute the lhs.
4274 This is needed for correct code for cases such as
4275 val = setjmp (buf) on machines where reference to val
4276 requires loading up part of an address in a separate insn.
4278 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4279 since it might be a promoted variable where the zero- or sign-extension
4280 needs to be done. Handling this in the normal way is safe because no
4281 computation is done before the call. */
4282 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4283 && COMPLETE_TYPE_P (TREE_TYPE (from))
4284 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4285 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4286 && REG_P (DECL_RTL (to))))
4291 value = expand_normal (from);
4293 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4295 /* Handle calls that return values in multiple non-contiguous locations.
4296 The Irix 6 ABI has examples of this. */
4297 if (GET_CODE (to_rtx) == PARALLEL)
4298 emit_group_load (to_rtx, value, TREE_TYPE (from),
4299 int_size_in_bytes (TREE_TYPE (from)));
4300 else if (GET_MODE (to_rtx) == BLKmode)
4301 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4304 if (POINTER_TYPE_P (TREE_TYPE (to)))
4305 value = convert_memory_address (GET_MODE (to_rtx), value);
4306 emit_move_insn (to_rtx, value);
4308 preserve_temp_slots (to_rtx);
4314 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4315 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4318 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4320 /* Don't move directly into a return register. */
4321 if (TREE_CODE (to) == RESULT_DECL
4322 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4327 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4329 if (GET_CODE (to_rtx) == PARALLEL)
4330 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4331 int_size_in_bytes (TREE_TYPE (from)));
4333 emit_move_insn (to_rtx, temp);
4335 preserve_temp_slots (to_rtx);
4341 /* In case we are returning the contents of an object which overlaps
4342 the place the value is being stored, use a safe function when copying
4343 a value through a pointer into a structure value return block. */
4344 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4345 && cfun->returns_struct
4346 && !cfun->returns_pcc_struct)
4351 size = expr_size (from);
4352 from_rtx = expand_normal (from);
4354 emit_library_call (memmove_libfunc, LCT_NORMAL,
4355 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4356 XEXP (from_rtx, 0), Pmode,
4357 convert_to_mode (TYPE_MODE (sizetype),
4358 size, TYPE_UNSIGNED (sizetype)),
4359 TYPE_MODE (sizetype));
4361 preserve_temp_slots (to_rtx);
4367 /* Compute FROM and store the value in the rtx we got. */
4370 result = store_expr (from, to_rtx, 0, nontemporal);
4371 preserve_temp_slots (result);
4377 /* Emit a nontemporal store insn that moves FROM to TO. Return true if this
4378 succeeded, false otherwise. */
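/* Note (an addition for clarity, not from the original sources): this
   is the hook used by store_expr when its NONTEMPORAL argument is true.
   On a target that provides no storent pattern it simply returns false
   and the caller falls back to an ordinary move; x86-style movnti
   stores are a typical instance of such a pattern.  */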
4381 emit_storent_insn (rtx to, rtx from)
4383 enum machine_mode mode = GET_MODE (to), imode;
4384 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4387 if (code == CODE_FOR_nothing)
4390 imode = insn_data[code].operand[0].mode;
4391 if (!insn_data[code].operand[0].predicate (to, imode))
4394 imode = insn_data[code].operand[1].mode;
4395 if (!insn_data[code].operand[1].predicate (from, imode))
4397 from = copy_to_mode_reg (imode, from);
4398 if (!insn_data[code].operand[1].predicate (from, imode))
4402 pattern = GEN_FCN (code) (to, from);
4403 if (pattern == NULL_RTX)
4406 emit_insn (pattern);
4410 /* Generate code for computing expression EXP,
4411 and storing the value into TARGET.
4413 If the mode is BLKmode then we may return TARGET itself.
4414 It turns out that in BLKmode it doesn't cause a problem,
4415 because C has no operators that could combine two different
4416 assignments into the same BLKmode object with different values
4417 with no sequence point. Will other languages need this to be more thorough?
4420 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4421 stack, and block moves may need to be treated specially.
4423 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4426 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4429 rtx alt_rtl = NULL_RTX;
4430 int dont_return_target = 0;
4432 if (VOID_TYPE_P (TREE_TYPE (exp)))
4434 /* C++ can generate ?: expressions with a throw expression in one
4435 branch and an rvalue in the other. Here, we resolve attempts to
4436 store the throw expression's nonexistent result. */
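/* A concrete C++ instance (added for illustration, not in the
   original file):

       x = ok ? v : throw std::runtime_error ("bad");

   When the throw branch is taken there is no value to store, so the
   expression is expanded purely for its side effects.  */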
4437 gcc_assert (!call_param_p);
4438 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4441 if (TREE_CODE (exp) == COMPOUND_EXPR)
4443 /* Perform the first part of the compound expression, then assign from the second part. */
4445 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4446 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4447 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4450 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4452 /* For conditional expression, get safe form of the target. Then
4453 test the condition, doing the appropriate assignment on either
4454 side. This avoids the creation of unnecessary temporaries.
4455 For non-BLKmode, it is more efficient not to do this. */
4457 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4459 do_pending_stack_adjust ();
4461 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4462 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4464 emit_jump_insn (gen_jump (lab2));
4467 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4474 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4475 /* If this is a scalar in a register that is stored in a wider mode
4476 than the declared mode, compute the result into its declared mode
4477 and then convert to the wider mode. Our value is the computed expression. */
4480 rtx inner_target = 0;
4482 /* We can do the conversion inside EXP, which will often result
4483 in some optimizations. Do the conversion in two steps: first
4484 change the signedness, if needed, then the extension. But don't
4485 do this if the type of EXP is a subtype of something else
4486 since then the conversion might involve more than just
4487 converting modes. */
4488 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4489 && TREE_TYPE (TREE_TYPE (exp)) == 0
4490 && GET_MODE_PRECISION (GET_MODE (target))
4491 == TYPE_PRECISION (TREE_TYPE (exp)))
4493 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4494 != SUBREG_PROMOTED_UNSIGNED_P (target))
4496 /* Some types, e.g. Fortran's logical*4, won't have a signed
4497 version, so use the mode instead. */
4499 = (signed_or_unsigned_type_for
4500 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4502 ntype = lang_hooks.types.type_for_mode
4503 (TYPE_MODE (TREE_TYPE (exp)),
4504 SUBREG_PROMOTED_UNSIGNED_P (target));
4506 exp = fold_convert (ntype, exp);
4509 exp = fold_convert (lang_hooks.types.type_for_mode
4510 (GET_MODE (SUBREG_REG (target)),
4511 SUBREG_PROMOTED_UNSIGNED_P (target)),
4514 inner_target = SUBREG_REG (target);
4517 temp = expand_expr (exp, inner_target, VOIDmode,
4518 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4520 /* If TEMP is a VOIDmode constant, use convert_modes to make
4521 sure that we properly convert it. */
4522 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4524 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4525 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4526 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4527 GET_MODE (target), temp,
4528 SUBREG_PROMOTED_UNSIGNED_P (target));
4531 convert_move (SUBREG_REG (target), temp,
4532 SUBREG_PROMOTED_UNSIGNED_P (target));
4536 else if (TREE_CODE (exp) == STRING_CST
4537 && !nontemporal && !call_param_p
4538 && TREE_STRING_LENGTH (exp) > 0
4539 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4541 /* Optimize initialization of an array with a STRING_CST. */
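/* Illustrative example (an addition, not from the original sources):
   for

       char buf[64] = "abc";

   the string bytes (rounded up for the by-pieces machinery and capped
   at the array size) are stored directly, and the remaining tail of
   BUF is zeroed with clear_storage below.  */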
4542 HOST_WIDE_INT exp_len, str_copy_len;
4545 exp_len = int_expr_size (exp);
4549 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4550 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4553 str_copy_len = TREE_STRING_LENGTH (exp);
4554 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4556 str_copy_len += STORE_MAX_PIECES - 1;
4557 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4559 str_copy_len = MIN (str_copy_len, exp_len);
4560 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4561 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4562 MEM_ALIGN (target), false))
4567 dest_mem = store_by_pieces (dest_mem,
4568 str_copy_len, builtin_strncpy_read_str,
4569 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4570 MEM_ALIGN (target), false,
4571 exp_len > str_copy_len ? 1 : 0);
4572 if (exp_len > str_copy_len)
4573 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4574 GEN_INT (exp_len - str_copy_len),
4583 /* If we want to use a nontemporal store, force the value into a register first. */
4585 tmp_target = nontemporal ? NULL_RTX : target;
4586 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4588 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4590 /* Return TARGET if it's a specified hardware register.
4591 If TARGET is a volatile mem ref, either return TARGET
4592 or return a reg copied *from* TARGET; ANSI requires this.
4594 Otherwise, if TEMP is not TARGET, return TEMP
4595 if it is constant (for efficiency),
4596 or if we really want the correct value. */
4597 if (!(target && REG_P (target)
4598 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4599 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4600 && ! rtx_equal_p (temp, target)
4601 && CONSTANT_P (temp))
4602 dont_return_target = 1;
4605 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4606 the same as that of TARGET, adjust the constant. This is needed, for
4607 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4609 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4610 && TREE_CODE (exp) != ERROR_MARK
4611 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4612 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4613 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4615 /* If value was not generated in the target, store it there.
4616 Convert the value to TARGET's type first if necessary and emit the
4617 pending incrementations that have been queued when expanding EXP.
4618 Note that we cannot emit the whole queue blindly because this will
4619 effectively disable the POST_INC optimization later.
4621 If TEMP and TARGET compare equal according to rtx_equal_p, but
4622 one or both of them are volatile memory refs, we have to distinguish
4624 - expand_expr has used TARGET. In this case, we must not generate
4625 another copy. This can be detected by TARGET being equal according to ==.
4627 - expand_expr has not used TARGET - that means that the source just
4628 happens to have the same RTX form. Since temp will have been created
4629 by expand_expr, it will compare unequal according to == .
4630 We must generate a copy in this case, to reach the correct number
4631 of volatile memory references. */
4633 if ((! rtx_equal_p (temp, target)
4634 || (temp != target && (side_effects_p (temp)
4635 || side_effects_p (target))))
4636 && TREE_CODE (exp) != ERROR_MARK
4637 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4638 but TARGET is not a valid memory reference, TEMP will differ
4639 from TARGET although it is really the same location. */
4640 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4641 /* If there's nothing to copy, don't bother. Don't call
4642 expr_size unless necessary, because some front-ends' (C++)
4643 expr_size hook must not be given objects that are not
4644 supposed to be bit-copied or bit-initialized. */
4645 && expr_size (exp) != const0_rtx)
4647 if (GET_MODE (temp) != GET_MODE (target)
4648 && GET_MODE (temp) != VOIDmode)
4650 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4651 if (dont_return_target)
4653 /* In this case, we will return TEMP,
4654 so make sure it has the proper mode.
4655 But don't forget to store the value into TARGET. */
4656 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4657 emit_move_insn (target, temp);
4659 else if (GET_MODE (target) == BLKmode
4660 || GET_MODE (temp) == BLKmode)
4661 emit_block_move (target, temp, expr_size (exp),
4663 ? BLOCK_OP_CALL_PARM
4664 : BLOCK_OP_NORMAL));
4666 convert_move (target, temp, unsignedp);
4669 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4671 /* Handle copying a string constant into an array. The string
4672 constant may be shorter than the array. So copy just the string's
4673 actual length, and clear the rest. First get the size of the data
4674 type of the string, which is actually the size of the target. */
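/* Illustrative sketch (added, not in the original file) of the two
   cases below: for

       char a[8] = "hi";

   SIZE is the constant 8, the three string bytes are block-copied and
   the remaining five bytes are cleared; if SIZE were not known at
   compile time, the leftover amount would instead be computed and
   tested at run time before calling clear_storage.  */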
4675 rtx size = expr_size (exp);
4677 if (GET_CODE (size) == CONST_INT
4678 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4679 emit_block_move (target, temp, size,
4681 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4684 /* Compute the size of the data to copy from the string. */
4686 = size_binop (MIN_EXPR,
4687 make_tree (sizetype, size),
4688 size_int (TREE_STRING_LENGTH (exp)));
4690 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4692 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4695 /* Copy that much. */
4696 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4697 TYPE_UNSIGNED (sizetype));
4698 emit_block_move (target, temp, copy_size_rtx,
4700 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4702 /* Figure out how much is left in TARGET that we have to clear.
4703 Do all calculations in ptr_mode. */
4704 if (GET_CODE (copy_size_rtx) == CONST_INT)
4706 size = plus_constant (size, -INTVAL (copy_size_rtx));
4707 target = adjust_address (target, BLKmode,
4708 INTVAL (copy_size_rtx));
4712 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4713 copy_size_rtx, NULL_RTX, 0,
4716 #ifdef POINTERS_EXTEND_UNSIGNED
4717 if (GET_MODE (copy_size_rtx) != Pmode)
4718 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4719 TYPE_UNSIGNED (sizetype));
4722 target = offset_address (target, copy_size_rtx,
4723 highest_pow2_factor (copy_size));
4724 label = gen_label_rtx ();
4725 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4726 GET_MODE (size), 0, label);
4729 if (size != const0_rtx)
4730 clear_storage (target, size, BLOCK_OP_NORMAL);
4736 /* Handle calls that return values in multiple non-contiguous locations.
4737 The Irix 6 ABI has examples of this. */
4738 else if (GET_CODE (target) == PARALLEL)
4739 emit_group_load (target, temp, TREE_TYPE (exp),
4740 int_size_in_bytes (TREE_TYPE (exp)));
4741 else if (GET_MODE (temp) == BLKmode)
4742 emit_block_move (target, temp, expr_size (exp),
4744 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4745 else if (nontemporal
4746 && emit_storent_insn (target, temp))
4747 /* If we managed to emit a nontemporal store, there is nothing else to do. */
4752 temp = force_operand (temp, target);
4754 emit_move_insn (target, temp);
4761 /* Helper for categorize_ctor_elements. Identical interface. */
4764 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4765 HOST_WIDE_INT *p_elt_count,
4768 unsigned HOST_WIDE_INT idx;
4769 HOST_WIDE_INT nz_elts, elt_count;
4770 tree value, purpose;
4772 /* Whether CTOR is a valid constant initializer, in accordance with what
4773 initializer_constant_valid_p does. If inferred from the constructor
4774 elements, true until proven otherwise. */
4775 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4776 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4781 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4786 if (TREE_CODE (purpose) == RANGE_EXPR)
4788 tree lo_index = TREE_OPERAND (purpose, 0);
4789 tree hi_index = TREE_OPERAND (purpose, 1);
4791 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4792 mult = (tree_low_cst (hi_index, 1)
4793 - tree_low_cst (lo_index, 1) + 1);
4796 switch (TREE_CODE (value))
4800 HOST_WIDE_INT nz = 0, ic = 0;
4803 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4805 nz_elts += mult * nz;
4806 elt_count += mult * ic;
4808 if (const_from_elts_p && const_p)
4809 const_p = const_elt_p;
4816 if (!initializer_zerop (value))
4822 nz_elts += mult * TREE_STRING_LENGTH (value);
4823 elt_count += mult * TREE_STRING_LENGTH (value);
4827 if (!initializer_zerop (TREE_REALPART (value)))
4829 if (!initializer_zerop (TREE_IMAGPART (value)))
4837 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4839 if (!initializer_zerop (TREE_VALUE (v)))
4850 if (const_from_elts_p && const_p)
4851 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4858 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4859 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4862 bool clear_this = true;
4864 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4866 /* We don't expect more than one element of the union to be
4867 initialized. Not sure what we should do otherwise... */
4868 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4871 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4872 CONSTRUCTOR_ELTS (ctor),
4875 /* ??? We could look at each element of the union, and find the
4876 largest element, which would avoid comparing the size of the
4877 initialized element against any tail padding in the union.
4878 Doesn't seem worth the effort... */
4879 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4880 TYPE_SIZE (init_sub_type)) == 1)
4882 /* And now we have to find out if the element itself is fully
4883 constructed. E.g. for union { struct { int a, b; } s; } u
4884 = { .s = { .a = 1 } }. */
4885 if (elt_count == count_type_elements (init_sub_type, false))
4890 *p_must_clear = clear_this;
4893 *p_nz_elts += nz_elts;
4894 *p_elt_count += elt_count;
4899 /* Examine CTOR to discover:
4900 * how many scalar fields are set to nonzero values,
4901 and place it in *P_NZ_ELTS;
4902 * how many scalar fields in total are in CTOR,
4903 and place it in *P_ELT_COUNT;
4904 * if a type is a union, and the initializer from the constructor
4905 is not the largest element in the union, then set *P_MUST_CLEAR.
4907 Return whether or not CTOR is a valid static constant initializer, the same
4908 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4911 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4912 HOST_WIDE_INT *p_elt_count,
4917 *p_must_clear = false;
4920 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4923 /* Count the number of scalars in TYPE. Return -1 on overflow or
4924 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4925 flexible array member at the end of the structure. */
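/* Examples (added for illustration, not in the original file):

       int a[4];                        4 scalars
       struct { int x; short y[3]; }    4 scalars
       double d;                        1 scalar

   With ALLOW_FLEXARR true, a trailing flexible array member such as
   "int tail[];" is skipped instead of making the count fail.  */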
4928 count_type_elements (const_tree type, bool allow_flexarr)
4930 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4931 switch (TREE_CODE (type))
4935 tree telts = array_type_nelts (type);
4936 if (telts && host_integerp (telts, 1))
4938 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4939 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4942 else if (max / n > m)
4950 HOST_WIDE_INT n = 0, t;
4953 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4954 if (TREE_CODE (f) == FIELD_DECL)
4956 t = count_type_elements (TREE_TYPE (f), false);
4959 /* Check for structures with a flexible array member. */
4960 tree tf = TREE_TYPE (f);
4962 && TREE_CHAIN (f) == NULL
4963 && TREE_CODE (tf) == ARRAY_TYPE
4965 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4966 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4967 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4968 && int_size_in_bytes (type) >= 0)
4980 case QUAL_UNION_TYPE:
4987 return TYPE_VECTOR_SUBPARTS (type);
4991 case FIXED_POINT_TYPE:
4996 case REFERENCE_TYPE:
5008 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5011 mostly_zeros_p (const_tree exp)
5013 if (TREE_CODE (exp) == CONSTRUCTOR)
5016 HOST_WIDE_INT nz_elts, count, elts;
5019 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5023 elts = count_type_elements (TREE_TYPE (exp), false);
5025 return nz_elts < elts / 4;
5028 return initializer_zerop (exp);
5031 /* Return 1 if EXP contains all zeros. */
5034 all_zeros_p (const_tree exp)
5036 if (TREE_CODE (exp) == CONSTRUCTOR)
5039 HOST_WIDE_INT nz_elts, count;
5042 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5043 return nz_elts == 0;
5046 return initializer_zerop (exp);
5049 /* Helper function for store_constructor.
5050 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5051 TYPE is the type of the CONSTRUCTOR, not the element type.
5052 CLEARED is as for store_constructor.
5053 ALIAS_SET is the alias set to use for any stores.
5055 This provides a recursive shortcut back to store_constructor when it isn't
5056 necessary to go through store_field. This is so that we can pass through
5057 the cleared field to let store_constructor know that we may not have to
5058 clear a substructure if the outer structure has already been cleared. */
5061 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5062 HOST_WIDE_INT bitpos, enum machine_mode mode,
5063 tree exp, tree type, int cleared,
5064 alias_set_type alias_set)
5066 if (TREE_CODE (exp) == CONSTRUCTOR
5067 /* We can only call store_constructor recursively if the size and
5068 bit position are on a byte boundary. */
5069 && bitpos % BITS_PER_UNIT == 0
5070 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5071 /* If we have a nonzero bitpos for a register target, then we just
5072 let store_field do the bitfield handling. This is unlikely to
5073 generate unnecessary clear instructions anyway. */
5074 && (bitpos == 0 || MEM_P (target)))
5078 = adjust_address (target,
5079 GET_MODE (target) == BLKmode
5081 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5082 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5085 /* Update the alias set, if required. */
5086 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5087 && MEM_ALIAS_SET (target) != 0)
5089 target = copy_rtx (target);
5090 set_mem_alias_set (target, alias_set);
5093 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5096 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5099 /* Store the value of constructor EXP into the rtx TARGET.
5100 TARGET is either a REG or a MEM; we know it cannot conflict, since
5101 safe_from_p has been called.
5102 CLEARED is true if TARGET is known to have been zero'd.
5103 SIZE is the number of bytes of TARGET we are allowed to modify: this
5104 may not be the same as the size of EXP if we are assigning to a field
5105 which has been packed to exclude padding bits. */
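/* Illustrative example (an addition, not from the original sources):
   for

       struct S { int a; int pad[15]; } s = { .a = 1 };

   the initializer is mostly zeros, so the code below clears the whole
   object with clear_storage first and then stores only the nonzero
   field, instead of emitting sixteen separate stores.  */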
5108 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5110 tree type = TREE_TYPE (exp);
5111 #ifdef WORD_REGISTER_OPERATIONS
5112 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5115 switch (TREE_CODE (type))
5119 case QUAL_UNION_TYPE:
5121 unsigned HOST_WIDE_INT idx;
5124 /* If size is zero or the target is already cleared, do nothing. */
5125 if (size == 0 || cleared)
5127 /* We either clear the aggregate or indicate the value is dead. */
5128 else if ((TREE_CODE (type) == UNION_TYPE
5129 || TREE_CODE (type) == QUAL_UNION_TYPE)
5130 && ! CONSTRUCTOR_ELTS (exp))
5131 /* If the constructor is empty, clear the union. */
5133 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5137 /* If we are building a static constructor into a register,
5138 set the initial value as zero so we can fold the value into
5139 a constant. But if more than one register is involved,
5140 this probably loses. */
5141 else if (REG_P (target) && TREE_STATIC (exp)
5142 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5144 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5148 /* If the constructor has fewer fields than the structure or
5149 if we are initializing the structure to mostly zeros, clear
5150 the whole structure first. Don't do this if TARGET is a
5151 register whose mode size isn't equal to SIZE since
5152 clear_storage can't handle this case. */
5154 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5155 != fields_length (type))
5156 || mostly_zeros_p (exp))
5158 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5161 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5165 if (REG_P (target) && !cleared)
5166 emit_clobber (target);
5168 /* Store each element of the constructor into the
5169 corresponding field of TARGET. */
5170 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5172 enum machine_mode mode;
5173 HOST_WIDE_INT bitsize;
5174 HOST_WIDE_INT bitpos = 0;
5176 rtx to_rtx = target;
5178 /* Just ignore missing fields. We cleared the whole
5179 structure, above, if any fields are missing. */
5183 if (cleared && initializer_zerop (value))
5186 if (host_integerp (DECL_SIZE (field), 1))
5187 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5191 mode = DECL_MODE (field);
5192 if (DECL_BIT_FIELD (field))
5195 offset = DECL_FIELD_OFFSET (field);
5196 if (host_integerp (offset, 0)
5197 && host_integerp (bit_position (field), 0))
5199 bitpos = int_bit_position (field);
5203 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5210 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5211 make_tree (TREE_TYPE (exp),
5214 offset_rtx = expand_normal (offset);
5215 gcc_assert (MEM_P (to_rtx));
5217 #ifdef POINTERS_EXTEND_UNSIGNED
5218 if (GET_MODE (offset_rtx) != Pmode)
5219 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5221 if (GET_MODE (offset_rtx) != ptr_mode)
5222 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5225 to_rtx = offset_address (to_rtx, offset_rtx,
5226 highest_pow2_factor (offset));
5229 #ifdef WORD_REGISTER_OPERATIONS
5230 /* If this initializes a field that is smaller than a
5231 word, at the start of a word, try to widen it to a full
5232 word. This special case allows us to output C++ member
5233 function initializations in a form that the optimizers can understand. */
5236 && bitsize < BITS_PER_WORD
5237 && bitpos % BITS_PER_WORD == 0
5238 && GET_MODE_CLASS (mode) == MODE_INT
5239 && TREE_CODE (value) == INTEGER_CST
5241 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5243 tree type = TREE_TYPE (value);
5245 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5247 type = lang_hooks.types.type_for_size
5248 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5249 value = fold_convert (type, value);
5252 if (BYTES_BIG_ENDIAN)
5254 = fold_build2 (LSHIFT_EXPR, type, value,
5255 build_int_cst (type,
5256 BITS_PER_WORD - bitsize));
5257 bitsize = BITS_PER_WORD;
5262 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5263 && DECL_NONADDRESSABLE_P (field))
5265 to_rtx = copy_rtx (to_rtx);
5266 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5269 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5270 value, type, cleared,
5271 get_alias_set (TREE_TYPE (field)));
5278 unsigned HOST_WIDE_INT i;
5281 tree elttype = TREE_TYPE (type);
5283 HOST_WIDE_INT minelt = 0;
5284 HOST_WIDE_INT maxelt = 0;
5286 domain = TYPE_DOMAIN (type);
5287 const_bounds_p = (TYPE_MIN_VALUE (domain)
5288 && TYPE_MAX_VALUE (domain)
5289 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5290 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5292 /* If we have constant bounds for the range of the type, get them. */
5295 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5296 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5299 /* If the constructor has fewer elements than the array, clear
5300 the whole array first. Similarly if this is a static
5301 constructor of a non-BLKmode object. */
5304 else if (REG_P (target) && TREE_STATIC (exp))
5308 unsigned HOST_WIDE_INT idx;
5310 HOST_WIDE_INT count = 0, zero_count = 0;
5311 need_to_clear = ! const_bounds_p;
5313 /* This loop is a more accurate version of the loop in
5314 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5315 is also needed to check for missing elements. */
5316 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5318 HOST_WIDE_INT this_node_count;
5323 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5325 tree lo_index = TREE_OPERAND (index, 0);
5326 tree hi_index = TREE_OPERAND (index, 1);
5328 if (! host_integerp (lo_index, 1)
5329 || ! host_integerp (hi_index, 1))
5335 this_node_count = (tree_low_cst (hi_index, 1)
5336 - tree_low_cst (lo_index, 1) + 1);
5339 this_node_count = 1;
5341 count += this_node_count;
5342 if (mostly_zeros_p (value))
5343 zero_count += this_node_count;
5346 /* Clear the entire array first if there are any missing
5347 elements, or if the incidence of zero elements is >= 75%. */
5350 && (count < maxelt - minelt + 1
5351 || 4 * zero_count >= 3 * count))
5355 if (need_to_clear && size > 0)
5358 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5360 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5364 if (!cleared && REG_P (target))
5365 /* Inform later passes that the old value is dead. */
5366 emit_clobber (target);
5368 /* Store each element of the constructor into the
5369 corresponding element of TARGET, determined by counting the elements. */
5371 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5373 enum machine_mode mode;
5374 HOST_WIDE_INT bitsize;
5375 HOST_WIDE_INT bitpos;
5377 rtx xtarget = target;
5379 if (cleared && initializer_zerop (value))
5382 unsignedp = TYPE_UNSIGNED (elttype);
5383 mode = TYPE_MODE (elttype);
5384 if (mode == BLKmode)
5385 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5386 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5389 bitsize = GET_MODE_BITSIZE (mode);
5391 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5393 tree lo_index = TREE_OPERAND (index, 0);
5394 tree hi_index = TREE_OPERAND (index, 1);
5395 rtx index_r, pos_rtx;
5396 HOST_WIDE_INT lo, hi, count;
5399 /* If the range is constant and "small", unroll the loop. */
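/* Illustrative example (an addition, not from the original sources):
   the GNU C range initializer

       int a[8] = { [2 ... 5] = 7 };

   produces a RANGE_EXPR index; the range here is constant and small,
   so the four stores are emitted directly instead of building the
   loop constructed further below.  */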
5401 && host_integerp (lo_index, 0)
5402 && host_integerp (hi_index, 0)
5403 && (lo = tree_low_cst (lo_index, 0),
5404 hi = tree_low_cst (hi_index, 0),
5405 count = hi - lo + 1,
5408 || (host_integerp (TYPE_SIZE (elttype), 1)
5409 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5412 lo -= minelt; hi -= minelt;
5413 for (; lo <= hi; lo++)
5415 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5418 && !MEM_KEEP_ALIAS_SET_P (target)
5419 && TREE_CODE (type) == ARRAY_TYPE
5420 && TYPE_NONALIASED_COMPONENT (type))
5422 target = copy_rtx (target);
5423 MEM_KEEP_ALIAS_SET_P (target) = 1;
5426 store_constructor_field
5427 (target, bitsize, bitpos, mode, value, type, cleared,
5428 get_alias_set (elttype));
5433 rtx loop_start = gen_label_rtx ();
5434 rtx loop_end = gen_label_rtx ();
5437 expand_normal (hi_index);
5438 unsignedp = TYPE_UNSIGNED (domain);
5440 index = build_decl (VAR_DECL, NULL_TREE, domain);
5443 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5445 SET_DECL_RTL (index, index_r);
5446 store_expr (lo_index, index_r, 0, false);
5448 /* Build the head of the loop. */
5449 do_pending_stack_adjust ();
5450 emit_label (loop_start);
5452 /* Assign value to element index. */
5454 fold_convert (ssizetype,
5455 fold_build2 (MINUS_EXPR,
5458 TYPE_MIN_VALUE (domain)));
5461 size_binop (MULT_EXPR, position,
5462 fold_convert (ssizetype,
5463 TYPE_SIZE_UNIT (elttype)));
5465 pos_rtx = expand_normal (position);
5466 xtarget = offset_address (target, pos_rtx,
5467 highest_pow2_factor (position));
5468 xtarget = adjust_address (xtarget, mode, 0);
5469 if (TREE_CODE (value) == CONSTRUCTOR)
5470 store_constructor (value, xtarget, cleared,
5471 bitsize / BITS_PER_UNIT);
5473 store_expr (value, xtarget, 0, false);
5475 /* Generate a conditional jump to exit the loop. */
5476 exit_cond = build2 (LT_EXPR, integer_type_node,
5478 jumpif (exit_cond, loop_end);
5480 /* Update the loop counter, and jump to the head of the loop. */
5482 expand_assignment (index,
5483 build2 (PLUS_EXPR, TREE_TYPE (index),
5484 index, integer_one_node),
5487 emit_jump (loop_start);
5489 /* Build the end of the loop. */
5490 emit_label (loop_end);
5493 else if ((index != 0 && ! host_integerp (index, 0))
5494 || ! host_integerp (TYPE_SIZE (elttype), 1))
5499 index = ssize_int (1);
5502 index = fold_convert (ssizetype,
5503 fold_build2 (MINUS_EXPR,
5506 TYPE_MIN_VALUE (domain)));
5509 size_binop (MULT_EXPR, index,
5510 fold_convert (ssizetype,
5511 TYPE_SIZE_UNIT (elttype)));
5512 xtarget = offset_address (target,
5513 expand_normal (position),
5514 highest_pow2_factor (position));
5515 xtarget = adjust_address (xtarget, mode, 0);
5516 store_expr (value, xtarget, 0, false);
5521 bitpos = ((tree_low_cst (index, 0) - minelt)
5522 * tree_low_cst (TYPE_SIZE (elttype), 1));
5524 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5526 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5527 && TREE_CODE (type) == ARRAY_TYPE
5528 && TYPE_NONALIASED_COMPONENT (type))
5530 target = copy_rtx (target);
5531 MEM_KEEP_ALIAS_SET_P (target) = 1;
5533 store_constructor_field (target, bitsize, bitpos, mode, value,
5534 type, cleared, get_alias_set (elttype));
5542 unsigned HOST_WIDE_INT idx;
5543 constructor_elt *ce;
5547 tree elttype = TREE_TYPE (type);
5548 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5549 enum machine_mode eltmode = TYPE_MODE (elttype);
5550 HOST_WIDE_INT bitsize;
5551 HOST_WIDE_INT bitpos;
5552 rtvec vector = NULL;
5555 gcc_assert (eltmode != BLKmode);
5557 n_elts = TYPE_VECTOR_SUBPARTS (type);
5558 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5560 enum machine_mode mode = GET_MODE (target);
5562 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5563 if (icode != CODE_FOR_nothing)
5567 vector = rtvec_alloc (n_elts);
5568 for (i = 0; i < n_elts; i++)
5569 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5573 /* If the constructor has fewer elements than the vector,
5574 clear the whole vector first. Similarly if this is a static
5575 constructor of a non-BLKmode object. */
5578 else if (REG_P (target) && TREE_STATIC (exp))
5582 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5585 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5587 int n_elts_here = tree_low_cst
5588 (int_const_binop (TRUNC_DIV_EXPR,
5589 TYPE_SIZE (TREE_TYPE (value)),
5590 TYPE_SIZE (elttype), 0), 1);
5592 count += n_elts_here;
5593 if (mostly_zeros_p (value))
5594 zero_count += n_elts_here;
5597 /* Clear the entire vector first if there are any missing elements,
5598 or if the incidence of zero elements is >= 75%. */
5599 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5602 if (need_to_clear && size > 0 && !vector)
5605 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5607 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5611 /* Inform later passes that the old value is dead. */
5612 if (!cleared && !vector && REG_P (target))
5613 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5615 /* Store each element of the constructor into the corresponding
5616 element of TARGET, determined by counting the elements. */
5617 for (idx = 0, i = 0;
5618 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5619 idx++, i += bitsize / elt_size)
5621 HOST_WIDE_INT eltpos;
5622 tree value = ce->value;
5624 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5625 if (cleared && initializer_zerop (value))
5629 eltpos = tree_low_cst (ce->index, 1);
5635 /* Vector CONSTRUCTORs should only be built from smaller
5636 vectors in the case of BLKmode vectors. */
5637 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5638 RTVEC_ELT (vector, eltpos)
5639 = expand_normal (value);
5643 enum machine_mode value_mode =
5644 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5645 ? TYPE_MODE (TREE_TYPE (value))
5647 bitpos = eltpos * elt_size;
5648 store_constructor_field (target, bitsize, bitpos,
5649 value_mode, value, type,
5650 cleared, get_alias_set (elttype));
5655 emit_insn (GEN_FCN (icode)
5657 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5666 /* Store the value of EXP (an expression tree)
5667 into a subfield of TARGET which has mode MODE and occupies
5668 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5669 If MODE is VOIDmode, it means that we are storing into a bit-field.
5671 Always return const0_rtx unless we have something particular to return.
5674 TYPE is the type of the underlying object,
5676 ALIAS_SET is the alias set for the destination. This value will
5677 (in general) be different from that for TARGET, since TARGET is a
5678 reference to the containing structure.
5680 If NONTEMPORAL is true, try generating a nontemporal store. */
5683 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5684 enum machine_mode mode, tree exp, tree type,
5685 alias_set_type alias_set, bool nontemporal)
5687 HOST_WIDE_INT width_mask = 0;
5689 if (TREE_CODE (exp) == ERROR_MARK)
5692 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5695 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5696 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5697 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5699 /* If we are storing into an unaligned field of an aligned union that is
5700 in a register, we may have the mode of TARGET being an integer mode but
5701 MODE == BLKmode. In that case, get an aligned object whose size and
5702 alignment are the same as TARGET and store TARGET into it (we can avoid
5703 the store if the field being stored is the entire width of TARGET). Then
5704 call ourselves recursively to store the field into a BLKmode version of
5705 that object. Finally, load from the object into TARGET. This is not
5706 very efficient in general, but should only be slightly more expensive
5707 than the otherwise-required unaligned accesses. Perhaps this can be
5708 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5709 twice, once with emit_move_insn and once via store_field. */
5712 && (REG_P (target) || GET_CODE (target) == SUBREG))
5714 rtx object = assign_temp (type, 0, 1, 1);
5715 rtx blk_object = adjust_address (object, BLKmode, 0);
5717 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5718 emit_move_insn (object, target);
5720 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5723 emit_move_insn (target, object);
5725 /* We want to return the BLKmode version of the data. */
5729 if (GET_CODE (target) == CONCAT)
5731 /* We're storing into a struct containing a single __complex. */
5733 gcc_assert (!bitpos);
5734 return store_expr (exp, target, 0, nontemporal);
5737 /* If the structure is in a register or if the component
5738 is a bit field, we cannot use addressing to access it.
5739 Use bit-field techniques or SUBREG to store in it. */
5741 if (mode == VOIDmode
5742 || (mode != BLKmode && ! direct_store[(int) mode]
5743 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5744 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5746 || GET_CODE (target) == SUBREG
5747 /* If the field isn't aligned enough to store as an ordinary memref,
5748 store it as a bit field. */
5750 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5751 || bitpos % GET_MODE_ALIGNMENT (mode))
5752 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5753 || (bitpos % BITS_PER_UNIT != 0)))
5754 /* If the RHS and field are a constant size and the size of the
5755 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5758 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5759 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5763 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5764 implies a mask operation. If the precision is the same size as
5765 the field we're storing into, that mask is redundant. This is
5766 particularly common with bit field assignments generated by the C front end. */
5768 if (TREE_CODE (exp) == NOP_EXPR)
5770 tree type = TREE_TYPE (exp);
5771 if (INTEGRAL_TYPE_P (type)
5772 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5773 && bitsize == TYPE_PRECISION (type))
5775 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5776 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5777 exp = TREE_OPERAND (exp, 0);
5781 temp = expand_normal (exp);
5783 /* If BITSIZE is narrower than the size of the type of EXP
5784 we will be narrowing TEMP. Normally, what's wanted are the
5785 low-order bits. However, if EXP's type is a record and this is a
5786 big-endian machine, we want the upper BITSIZE bits. */
5787 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5788 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5789 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5790 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5791 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5795 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5797 if (mode != VOIDmode && mode != BLKmode
5798 && mode != TYPE_MODE (TREE_TYPE (exp)))
5799 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5801 /* If the modes of TEMP and TARGET are both BLKmode, both
5802 must be in memory and BITPOS must be aligned on a byte
5803 boundary. If so, we simply do a block copy. Likewise
5804 for a BLKmode-like TARGET. */
5805 if (GET_MODE (temp) == BLKmode
5806 && (GET_MODE (target) == BLKmode
5808 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5809 && (bitpos % BITS_PER_UNIT) == 0
5810 && (bitsize % BITS_PER_UNIT) == 0)))
5812 gcc_assert (MEM_P (target) && MEM_P (temp)
5813 && (bitpos % BITS_PER_UNIT) == 0);
5815 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5816 emit_block_move (target, temp,
5817 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5824 /* Store the value in the bitfield. */
5825 store_bit_field (target, bitsize, bitpos, mode, temp);
5831 /* Now build a reference to just the desired component. */
5832 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5834 if (to_rtx == target)
5835 to_rtx = copy_rtx (to_rtx);
5837 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5838 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5839 set_mem_alias_set (to_rtx, alias_set);
5841 return store_expr (exp, to_rtx, 0, nontemporal);
5845 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5846 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5847 codes and find the ultimate containing object, which we return.
5849 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5850 bit position, and *PUNSIGNEDP to the signedness of the field.
5851 If the position of the field is variable, we store a tree
5852 giving the variable offset (in units) in *POFFSET.
5853 This offset is in addition to the bit position.
5854 If the position is not variable, we store 0 in *POFFSET.
5856 If any of the extraction expressions is volatile,
5857 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5859 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5860 Otherwise, it is a mode that can be used to access the field.
5862 If the field describes a variable-sized object, *PMODE is set to
5863 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5864 this case, but the address of the object can be found.
5866 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5867 look through nodes that serve as markers of a greater alignment than
5868 the one that can be deduced from the expression. These nodes make it
5869 possible for front-ends to prevent temporaries from being created by
5870 the middle-end on alignment considerations. For that purpose, the
5871 normal operating mode at high-level is to always pass FALSE so that
5872 the ultimate containing object is really returned; moreover, the
5873 associated predicate handled_component_p will always return TRUE
5874 on these nodes, thus indicating that they are essentially handled
5875 by get_inner_reference. TRUE should only be passed when the caller
5876 is scanning the expression in order to build another representation
5877 and specifically knows how to handle these nodes; as such, this is
5878 the normal operating mode in the RTL expanders. */
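/* Worked example (an addition, not from the original sources): given

       struct S { int x; unsigned f : 5; } s;

   calling this on the expression s.f returns the tree for the base
   object s, with *PBITSIZE = 5, *PBITPOS = 32 (assuming a 32-bit int
   with no extra padding), *POFFSET = 0, and *PMODE = VOIDmode since F
   is a bit-field.  */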
5881 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5882 HOST_WIDE_INT *pbitpos, tree *poffset,
5883 enum machine_mode *pmode, int *punsignedp,
5884 int *pvolatilep, bool keep_aligning)
5887 enum machine_mode mode = VOIDmode;
5888 bool blkmode_bitfield = false;
5889 tree offset = size_zero_node;
5890 tree bit_offset = bitsize_zero_node;
5892 /* First get the mode, signedness, and size. We do this from just the
5893 outermost expression. */
5894 if (TREE_CODE (exp) == COMPONENT_REF)
5896 tree field = TREE_OPERAND (exp, 1);
5897 size_tree = DECL_SIZE (field);
5898 if (!DECL_BIT_FIELD (field))
5899 mode = DECL_MODE (field);
5900 else if (DECL_MODE (field) == BLKmode)
5901 blkmode_bitfield = true;
5903 *punsignedp = DECL_UNSIGNED (field);
5905 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5907 size_tree = TREE_OPERAND (exp, 1);
5908 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5909 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5911 /* For vector types, with the correct size of access, use the mode of the inner type. */
5913 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5914 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5915 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5916 mode = TYPE_MODE (TREE_TYPE (exp));
5920 mode = TYPE_MODE (TREE_TYPE (exp));
5921 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5923 if (mode == BLKmode)
5924 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5926 *pbitsize = GET_MODE_BITSIZE (mode);
5931 if (! host_integerp (size_tree, 1))
5932 mode = BLKmode, *pbitsize = -1;
5934 *pbitsize = tree_low_cst (size_tree, 1);
5937 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5938 and find the ultimate containing object. */
5941 switch (TREE_CODE (exp))
5944 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5945 TREE_OPERAND (exp, 2));
5950 tree field = TREE_OPERAND (exp, 1);
5951 tree this_offset = component_ref_field_offset (exp);
5953 /* If this field hasn't been filled in yet, don't go past it.
5954 This should only happen when folding expressions made during
5955 type construction. */
5956 if (this_offset == 0)
5959 offset = size_binop (PLUS_EXPR, offset, this_offset);
5960 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5961 DECL_FIELD_BIT_OFFSET (field));
5963 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5968 case ARRAY_RANGE_REF:
5970 tree index = TREE_OPERAND (exp, 1);
5971 tree low_bound = array_ref_low_bound (exp);
5972 tree unit_size = array_ref_element_size (exp);
5974 /* We assume all arrays have sizes that are a multiple of a byte.
5975 First subtract the lower bound, if any, in the type of the
5976 index, then convert to sizetype and multiply by the size of
5977 the array element. */
5978 if (! integer_zerop (low_bound))
5979 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5982 offset = size_binop (PLUS_EXPR, offset,
5983 size_binop (MULT_EXPR,
5984 fold_convert (sizetype, index),
5993 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5994 bitsize_int (*pbitsize));
5997 case VIEW_CONVERT_EXPR:
5998 if (keep_aligning && STRICT_ALIGNMENT
5999 && (TYPE_ALIGN (TREE_TYPE (exp))
6000 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6001 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6002 < BIGGEST_ALIGNMENT)
6003 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6004 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6012 /* If any reference in the chain is volatile, the effect is volatile. */
6013 if (TREE_THIS_VOLATILE (exp))
6016 exp = TREE_OPERAND (exp, 0);
6020 /* If OFFSET is constant, see if we can return the whole thing as a
6021 constant bit position. Make sure to handle overflow during this conversion. */
6023 if (host_integerp (offset, 0))
6025 double_int tem = double_int_mul (tree_to_double_int (offset),
6026 uhwi_to_double_int (BITS_PER_UNIT));
6027 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6028 if (double_int_fits_in_shwi_p (tem))
6030 *pbitpos = double_int_to_shwi (tem);
6031 *poffset = offset = NULL_TREE;
6035 /* Otherwise, split it up. */
6038 *pbitpos = tree_low_cst (bit_offset, 0);
6042 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6043 if (mode == VOIDmode
6045 && (*pbitpos % BITS_PER_UNIT) == 0
6046 && (*pbitsize % BITS_PER_UNIT) == 0)
6054 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
6055 look for whether EXP or any nested component-refs within EXP are marked as PACKED. */
6059 contains_packed_reference (const_tree exp)
6061 bool packed_p = false;
6065 switch (TREE_CODE (exp))
6069 tree field = TREE_OPERAND (exp, 1);
6070 packed_p = DECL_PACKED (field)
6071 || TYPE_PACKED (TREE_TYPE (field))
6072 || TYPE_PACKED (TREE_TYPE (exp));
6080 case ARRAY_RANGE_REF:
6083 case VIEW_CONVERT_EXPR:
6089 exp = TREE_OPERAND (exp, 0);
6095 /* Return a tree of sizetype representing the size, in bytes, of the element
6096 of EXP, an ARRAY_REF. */
6099 array_ref_element_size (tree exp)
6101 tree aligned_size = TREE_OPERAND (exp, 3);
6102 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6104 /* If a size was specified in the ARRAY_REF, it's the size measured
6105 in alignment units of the element type. So multiply by that value. */
6108 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6109 sizetype from another type of the same width and signedness. */
6110 if (TREE_TYPE (aligned_size) != sizetype)
6111 aligned_size = fold_convert (sizetype, aligned_size);
6112 return size_binop (MULT_EXPR, aligned_size,
6113 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6116 /* Otherwise, take the size from that of the element type. Substitute
6117 any PLACEHOLDER_EXPR that we have. */
6119 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6122 /* Return a tree representing the lower bound of the array mentioned in
6123 EXP, an ARRAY_REF. */
6126 array_ref_low_bound (tree exp)
6128 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6130 /* If a lower bound is specified in EXP, use it. */
6131 if (TREE_OPERAND (exp, 2))
6132 return TREE_OPERAND (exp, 2);
6134 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6135 substituting for a PLACEHOLDER_EXPR as needed. */
6136 if (domain_type && TYPE_MIN_VALUE (domain_type))
6137 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6139 /* Otherwise, return a zero of the appropriate type. */
6140 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6143 /* Return a tree representing the upper bound of the array mentioned in
6144 EXP, an ARRAY_REF. */
6147 array_ref_up_bound (tree exp)
6149 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6151 /* If there is a domain type and it has an upper bound, use it, substituting
6152 for a PLACEHOLDER_EXPR as needed. */
6153 if (domain_type && TYPE_MAX_VALUE (domain_type))
6154 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6156 /* Otherwise fail. */
6160 /* Return a tree representing the offset, in bytes, of the field referenced
6161 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6164 component_ref_field_offset (tree exp)
6166 tree aligned_offset = TREE_OPERAND (exp, 2);
6167 tree field = TREE_OPERAND (exp, 1);
6169 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6170 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that value. */
6174 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6175 sizetype from another type of the same width and signedness. */
6176 if (TREE_TYPE (aligned_offset) != sizetype)
6177 aligned_offset = fold_convert (sizetype, aligned_offset);
6178 return size_binop (MULT_EXPR, aligned_offset,
6179 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6182 /* Otherwise, take the offset from that of the field. Substitute
6183 any PLACEHOLDER_EXPR that we have. */
6185 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6188 /* Return 1 if T is an expression that get_inner_reference handles. */
6191 handled_component_p (const_tree t)
6193 switch (TREE_CODE (t))
6198 case ARRAY_RANGE_REF:
6199 case VIEW_CONVERT_EXPR:
6209 /* Given an rtx VALUE that may contain additions and multiplications, return
6210 an equivalent value that just refers to a register, memory, or constant.
6211 This is done by generating instructions to perform the arithmetic and
6212 returning a pseudo-register containing the value.
6214 The returned value may be a REG, SUBREG, MEM or constant. */
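/* Illustrative example (an addition, not from the original sources):
   called with VALUE = (plus:SI (reg:SI 60) (const_int 4)), this emits
   an add instruction and returns a pseudo register (or TARGET, if
   given) holding the sum, so the caller gets a "simple" operand in
   place of the arithmetic expression.  */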
6217 force_operand (rtx value, rtx target)
6220 /* Use subtarget as the target for operand 0 of a binary operation. */
6221 rtx subtarget = get_subtarget (target);
6222 enum rtx_code code = GET_CODE (value);
6224 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6226 && !REG_P (SUBREG_REG (value))
6227 && !MEM_P (SUBREG_REG (value)))
6230 = simplify_gen_subreg (GET_MODE (value),
6231 force_reg (GET_MODE (SUBREG_REG (value)),
6232 force_operand (SUBREG_REG (value),
6234 GET_MODE (SUBREG_REG (value)),
6235 SUBREG_BYTE (value));
6236 code = GET_CODE (value);
6239 /* Check for a PIC address load. */
6240 if ((code == PLUS || code == MINUS)
6241 && XEXP (value, 0) == pic_offset_table_rtx
6242 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6243 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6244 || GET_CODE (XEXP (value, 1)) == CONST))
6247 subtarget = gen_reg_rtx (GET_MODE (value));
6248 emit_move_insn (subtarget, value);
6252 if (ARITHMETIC_P (value))
6254 op2 = XEXP (value, 1);
6255 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6257 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6260 op2 = negate_rtx (GET_MODE (value), op2);
6263 /* Check for an addition with OP2 a constant integer and our first
6264 operand a PLUS of a virtual register and something else. In that
6265 case, we want to emit the sum of the virtual register and the
6266 constant first and then add the other value. This allows virtual
6267 register instantiation to simply modify the constant rather than
6268 creating another one around this addition. */
6269 if (code == PLUS && GET_CODE (op2) == CONST_INT
6270 && GET_CODE (XEXP (value, 0)) == PLUS
6271 && REG_P (XEXP (XEXP (value, 0), 0))
6272 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6273 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6275 rtx temp = expand_simple_binop (GET_MODE (value), code,
6276 XEXP (XEXP (value, 0), 0), op2,
6277 subtarget, 0, OPTAB_LIB_WIDEN);
6278 return expand_simple_binop (GET_MODE (value), code, temp,
6279 force_operand (XEXP (XEXP (value,
6281 target, 0, OPTAB_LIB_WIDEN);
6284 op1 = force_operand (XEXP (value, 0), subtarget);
6285 op2 = force_operand (op2, NULL_RTX);
6289 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6291 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6292 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6293 target, 1, OPTAB_LIB_WIDEN);
6295 return expand_divmod (0,
6296 FLOAT_MODE_P (GET_MODE (value))
6297 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6298 GET_MODE (value), op1, op2, target, 0);
6300 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6303 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6306 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6309 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6310 target, 0, OPTAB_LIB_WIDEN);
6312 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6313 target, 1, OPTAB_LIB_WIDEN);
6316 if (UNARY_P (value))
6319 target = gen_reg_rtx (GET_MODE (value));
6320 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6327 case FLOAT_TRUNCATE:
6328 convert_move (target, op1, code == ZERO_EXTEND);
6333 expand_fix (target, op1, code == UNSIGNED_FIX);
6337 case UNSIGNED_FLOAT:
6338 expand_float (target, op1, code == UNSIGNED_FLOAT);
6342 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6346 #ifdef INSN_SCHEDULING
6347 /* On machines that have insn scheduling, we want all memory references to be
6348 explicit, so we need to deal with such paradoxical SUBREGs. */
6349 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6350 && (GET_MODE_SIZE (GET_MODE (value))
6351 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6353 = simplify_gen_subreg (GET_MODE (value),
6354 force_reg (GET_MODE (SUBREG_REG (value)),
6355 force_operand (SUBREG_REG (value),
6357 GET_MODE (SUBREG_REG (value)),
6358 SUBREG_BYTE (value));
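/* Illustrative sketch (not part of the original source; BASE is
   assumed to be a Pmode pseudo): a typical use is to legitimize an
   address computation before it is used as an insn operand, e.g.

     rtx sum = gen_rtx_PLUS (Pmode, base, GEN_INT (12));
     rtx val = force_operand (sum, NULL_RTX);

   which emits an add insn and returns a REG (or an equivalent
   SUBREG, MEM or constant) holding BASE + 12. */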
6364 /* Subroutine of expand_expr: return nonzero iff there is no way that
6365 EXP can reference X, which is being modified. TOP_P is nonzero if this
6366 call is going to be used to determine whether we need a temporary
6367 for EXP, as opposed to a recursive call to this function.
6369 It is always safe for this routine to return zero since it merely
6370 searches for optimization opportunities. */
6373 safe_from_p (const_rtx x, tree exp, int top_p)
6379 /* If EXP has varying size, we MUST use a target since we currently
6380 have no way of allocating temporaries of variable size
6381 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6382 So we assume here that something at a higher level has prevented a
6383 clash. This is somewhat bogus, but the best we can do. Only
6384 do this when X is BLKmode and when we are at the top level. */
6385 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6386 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6387 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6388 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6389 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6391 && GET_MODE (x) == BLKmode)
6392 /* If X is in the outgoing argument area, it is always safe. */
6394 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6395 || (GET_CODE (XEXP (x, 0)) == PLUS
6396 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6399 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6400 find the underlying pseudo. */
6401 if (GET_CODE (x) == SUBREG)
6404 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6408 /* Now look at our tree code and possibly recurse. */
6409 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6411 case tcc_declaration:
6412 exp_rtl = DECL_RTL_IF_SET (exp);
6418 case tcc_exceptional:
6419 if (TREE_CODE (exp) == TREE_LIST)
6423 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6425 exp = TREE_CHAIN (exp);
6428 if (TREE_CODE (exp) != TREE_LIST)
6429 return safe_from_p (x, exp, 0);
6432 else if (TREE_CODE (exp) == CONSTRUCTOR)
6434 constructor_elt *ce;
6435 unsigned HOST_WIDE_INT idx;
6438 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6440 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6441 || !safe_from_p (x, ce->value, 0))
6445 else if (TREE_CODE (exp) == ERROR_MARK)
6446 return 1; /* An already-visited SAVE_EXPR? */
6451 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
6453 return (TREE_CODE (exp) != DECL_EXPR
6454 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6455 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6456 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6459 case tcc_comparison:
6460 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6465 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6467 case tcc_expression:
6470 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6471 the expression. If it is set, we conflict iff we are that rtx or
6472 both are in memory. Otherwise, we check all operands of the
6473 expression recursively. */
6475 switch (TREE_CODE (exp))
6478 /* If the operand is static or we are static, we can't conflict.
6479 Likewise if we don't conflict with the operand at all. */
6480 if (staticp (TREE_OPERAND (exp, 0))
6481 || TREE_STATIC (exp)
6482 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6485 /* Otherwise, the only way this can conflict is if we are taking
6486 the address of a DECL and that address is part of X, which is very rare. */
6488 exp = TREE_OPERAND (exp, 0);
6491 if (!DECL_RTL_SET_P (exp)
6492 || !MEM_P (DECL_RTL (exp)))
6495 exp_rtl = XEXP (DECL_RTL (exp), 0);
6499 case MISALIGNED_INDIRECT_REF:
6500 case ALIGN_INDIRECT_REF:
6503 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6504 get_alias_set (exp)))
6509 /* Assume that the call will clobber all hard registers and all of memory. */
6511 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6516 case WITH_CLEANUP_EXPR:
6517 case CLEANUP_POINT_EXPR:
6518 /* Lowered by gimplify.c. */
6522 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6528 /* If we have an rtx, we do not need to scan our operands. */
6532 nops = TREE_OPERAND_LENGTH (exp);
6533 for (i = 0; i < nops; i++)
6534 if (TREE_OPERAND (exp, i) != 0
6535 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6541 /* Should never get a type here. */
6545 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
6549 if (GET_CODE (exp_rtl) == SUBREG)
6551 exp_rtl = SUBREG_REG (exp_rtl);
6553 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6557 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6558 are memory and they conflict. */
6559 return ! (rtx_equal_p (x, exp_rtl)
6560 || (MEM_P (x) && MEM_P (exp_rtl)
6561 && true_dependence (exp_rtl, VOIDmode, x,
6562 rtx_addr_varies_p)));
6565 /* If we reach here, it is safe. */
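/* Illustrative sketch (not part of the original source): a caller
   typically uses safe_from_p to decide whether TARGET may be reused
   while another operand is still unexpanded, e.g.

     if (target != 0 && safe_from_p (target, exp1, 1))
       op0 = expand_expr (exp0, target, VOIDmode, EXPAND_NORMAL);
     else
       op0 = expand_expr (exp0, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   A false negative merely costs an extra temporary, never
   correctness, which is why returning zero is always safe. */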
6570 /* Return the highest power of two that EXP is known to be a multiple of.
6571 This is used in updating alignment of MEMs in array references. */
6573 unsigned HOST_WIDE_INT
6574 highest_pow2_factor (const_tree exp)
6576 unsigned HOST_WIDE_INT c0, c1;
6578 switch (TREE_CODE (exp))
6581 /* We can find the lowest bit that's a one. If the low
6582 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6583 We need to handle this case since we can find it in a COND_EXPR,
6584 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6585 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
6587 if (TREE_OVERFLOW (exp))
6588 return BIGGEST_ALIGNMENT;
6591 /* Note: tree_low_cst is intentionally not used here;
6592 we don't care about the upper bits. */
6593 c0 = TREE_INT_CST_LOW (exp);
6595 return c0 ? c0 : BIGGEST_ALIGNMENT;
6599 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6600 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6601 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6602 return MIN (c0, c1);
6605 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6606 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6609 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6611 if (integer_pow2p (TREE_OPERAND (exp, 1))
6612 && host_integerp (TREE_OPERAND (exp, 1), 1))
6614 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6615 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6616 return MAX (1, c0 / c1);
6621 /* The highest power of two of a bit-and expression is the maximum of
6622 that of its operands. We typically get here for a complex LHS and
6623 a constant negative power of two on the RHS to force an explicit
6624 alignment, so don't bother looking at the LHS. */
6625 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6629 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6632 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6635 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6636 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6637 return MIN (c0, c1);
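/* Illustrative examples (not part of the original source): for the
   tree (i * 8) the MULT_EXPR case yields 1 * 8 = 8; for (i * 8 + 48)
   the PLUS_EXPR case yields MIN (8, 16) = 8; for a bare variable the
   result is 1, i.e. no alignment is known beyond a single unit. */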
6646 /* Similar, except that the alignment requirements of TARGET are
6647 taken into account. Assume it is at least as aligned as its
6648 type, unless it is a COMPONENT_REF in which case the layout of
6649 the structure gives the alignment. */
6651 static unsigned HOST_WIDE_INT
6652 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6654 unsigned HOST_WIDE_INT target_align, factor;
6656 factor = highest_pow2_factor (exp);
6657 if (TREE_CODE (target) == COMPONENT_REF)
6658 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6660 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6661 return MAX (factor, target_align);
6664 /* Return &VAR expression for emulated thread local VAR. */
6667 emutls_var_address (tree var)
6669 tree emuvar = emutls_decl (var);
6670 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6671 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6672 tree arglist = build_tree_list (NULL_TREE, arg);
6673 tree call = build_function_call_expr (fn, arglist);
6674 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
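/* Illustrative example (not part of the original source; the
   __emutls_v. naming is the usual emutls convention, not guaranteed):
   on a target without native TLS, for "__thread int t;" the address
   &t is conceptually rewritten as

     (int *) __emutls_get_address (&__emutls_v.t)

   where __emutls_v.t is the control variable created by emutls_decl. */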
6678 /* Subroutine of expand_expr. Expand the two operands of a binary
6679 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6680 The value may be stored in TARGET if TARGET is nonzero. The
6681 MODIFIER argument is as documented by expand_expr. */
6684 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6685 enum expand_modifier modifier)
6687 if (! safe_from_p (target, exp1, 1))
6689 if (operand_equal_p (exp0, exp1, 0))
6691 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6692 *op1 = copy_rtx (*op0);
6696 /* If we need to preserve evaluation order, copy exp0 into its own
6697 temporary variable so that it can't be clobbered by exp1. */
6698 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6699 exp0 = save_expr (exp0);
6700 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6701 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
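/* Illustrative usage (not part of the original source): binary
   operator expansion below calls this as

     expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                      subtarget, &op0, &op1, EXPAND_NORMAL);

   and the safe_from_p check above discards TARGET whenever EXP1
   might reference it, so operand 0 is computed somewhere safe. */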
6706 /* Return a MEM that contains constant EXP. DEFER is as for
6707 output_constant_def and MODIFIER is as for expand_expr. */
6710 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6714 mem = output_constant_def (exp, defer);
6715 if (modifier != EXPAND_INITIALIZER)
6716 mem = use_anchored_address (mem);
6720 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6721 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6724 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6725 enum expand_modifier modifier)
6727 rtx result, subtarget;
6729 HOST_WIDE_INT bitsize, bitpos;
6730 int volatilep, unsignedp;
6731 enum machine_mode mode1;
6733 /* If we are taking the address of a constant and are at the top level,
6734 we have to use output_constant_def since we can't call force_const_mem yet. */
6736 /* ??? This should be considered a front-end bug. We should not be
6737 generating ADDR_EXPR of something that isn't an LVALUE. The only
6738 exception here is STRING_CST. */
6739 if (CONSTANT_CLASS_P (exp))
6740 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6742 /* Everything must be something allowed by is_gimple_addressable. */
6743 switch (TREE_CODE (exp))
6746 /* This case will happen via recursion for &a->b. */
6747 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6750 /* Recurse and make the output_constant_def clause above handle this. */
6751 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6755 /* The real part of the complex number is always first, therefore
6756 the address is the same as the address of the parent object. */
6759 inner = TREE_OPERAND (exp, 0);
6763 /* The imaginary part of the complex number is always second.
6764 The expression is therefore always offset by the size of the scalar type. */
6767 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6768 inner = TREE_OPERAND (exp, 0);
6772 /* TLS emulation hook - replace __thread VAR's &VAR with
6773 __emutls_get_address (&_emutls.VAR). */
6774 if (! targetm.have_tls
6775 && TREE_CODE (exp) == VAR_DECL
6776 && DECL_THREAD_LOCAL_P (exp))
6778 exp = emutls_var_address (exp);
6779 return expand_expr (exp, target, tmode, modifier);
6784 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6785 expand_expr, as that can have various side effects; LABEL_DECLs for
6786 example, may not have their DECL_RTL set yet. Expand the rtl of
6787 CONSTRUCTORs too, which should yield a memory reference for the
6788 constructor's contents. Assume language specific tree nodes can
6789 be expanded in some interesting way. */
6791 || TREE_CODE (exp) == CONSTRUCTOR
6792 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6794 result = expand_expr (exp, target, tmode,
6795 modifier == EXPAND_INITIALIZER
6796 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6798 /* If the DECL isn't in memory, then the DECL wasn't properly
6799 marked TREE_ADDRESSABLE, which will be either a front-end
6800 or a tree optimizer bug. */
6801 gcc_assert (MEM_P (result));
6802 result = XEXP (result, 0);
6804 /* ??? Is this needed anymore? */
6805 if (DECL_P (exp) && !TREE_USED (exp))
6807 assemble_external (exp);
6808 TREE_USED (exp) = 1;
6811 if (modifier != EXPAND_INITIALIZER
6812 && modifier != EXPAND_CONST_ADDRESS)
6813 result = force_operand (result, target);
6817 /* Pass FALSE as the last argument to get_inner_reference although
6818 we are expanding to RTL. The rationale is that we know how to
6819 handle "aligning nodes" here: we can just bypass them because
6820 they won't change the final object whose address will be returned
6821 (they actually exist only for that purpose). */
6822 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6823 &mode1, &unsignedp, &volatilep, false);
6827 /* We must have made progress. */
6828 gcc_assert (inner != exp);
6830 subtarget = offset || bitpos ? NULL_RTX : target;
6831 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6837 if (modifier != EXPAND_NORMAL)
6838 result = force_operand (result, NULL);
6839 tmp = expand_expr (offset, NULL_RTX, tmode,
6840 modifier == EXPAND_INITIALIZER
6841 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6843 result = convert_memory_address (tmode, result);
6844 tmp = convert_memory_address (tmode, tmp);
6846 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6847 result = gen_rtx_PLUS (tmode, result, tmp);
6850 subtarget = bitpos ? NULL_RTX : target;
6851 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6852 1, OPTAB_LIB_WIDEN);
6858 /* Someone beforehand should have rejected taking the address
6859 of such an object. */
6860 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6862 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6863 if (modifier < EXPAND_SUM)
6864 result = force_operand (result, target);
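/* Illustrative walk-through (not part of the original source): for
   &a[3] with "int a[10]" and 32-bit ints, get_inner_reference peels
   the ARRAY_REF and returns the decl A with bitpos = 96 and no
   variable offset; the address of A is then expanded recursively and
   the plus_constant call above adds bitpos / BITS_PER_UNIT = 12
   bytes to it. */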
6870 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6871 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6874 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6875 enum expand_modifier modifier)
6877 enum machine_mode rmode;
6880 /* Target mode of VOIDmode says "whatever's natural". */
6881 if (tmode == VOIDmode)
6882 tmode = TYPE_MODE (TREE_TYPE (exp));
6884 /* We can get called with some Weird Things if the user does silliness
6885 like "(short) &a". In that case, convert_memory_address won't do
6886 the right thing, so ignore the given target mode. */
6887 if (tmode != Pmode && tmode != ptr_mode)
6890 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6893 /* Despite expand_expr's claims concerning ignoring TMODE when not
6894 strictly convenient, stuff breaks if we don't honor it. Note
6895 that combined with the above, we only do this for pointer modes. */
6896 rmode = GET_MODE (result);
6897 if (rmode == VOIDmode)
6900 result = convert_memory_address (tmode, result);
6905 /* Generate code for computing CONSTRUCTOR EXP.
6906 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6907 is TRUE, instead of creating a temporary variable in memory,
6908 NULL is returned and the caller needs to handle it differently. */
6911 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6912 bool avoid_temp_mem)
6914 tree type = TREE_TYPE (exp);
6915 enum machine_mode mode = TYPE_MODE (type);
6917 /* Try to avoid creating a temporary at all. This is possible
6918 if all of the initializer is zero.
6919 FIXME: try to handle all [0..255] initializers we can handle with memset. */
6921 if (TREE_STATIC (exp)
6922 && !TREE_ADDRESSABLE (exp)
6923 && target != 0 && mode == BLKmode
6924 && all_zeros_p (exp))
6926 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6930 /* All elts simple constants => refer to a constant in memory. But
6931 if this is a non-BLKmode mode, let it store a field at a time
6932 since that should make a CONST_INT or CONST_DOUBLE when we
6933 fold. Likewise, if we have a target we can use, it is best to
6934 store directly into the target unless the type is large enough
6935 that memcpy will be used. If we are making an initializer and
6936 all operands are constant, put it in memory as well.
6938 FIXME: Avoid trying to fill vector constructors piece-meal.
6939 Output them with output_constant_def below unless we're sure
6940 they're zeros. This should go away when vector initializers
6941 are treated like VECTOR_CST instead of arrays. */
6942 if ((TREE_STATIC (exp)
6943 && ((mode == BLKmode
6944 && ! (target != 0 && safe_from_p (target, exp, 1)))
6945 || TREE_ADDRESSABLE (exp)
6946 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6947 && (! MOVE_BY_PIECES_P
6948 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6950 && ! mostly_zeros_p (exp))))
6951 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6952 && TREE_CONSTANT (exp)))
6959 constructor = expand_expr_constant (exp, 1, modifier);
6961 if (modifier != EXPAND_CONST_ADDRESS
6962 && modifier != EXPAND_INITIALIZER
6963 && modifier != EXPAND_SUM)
6964 constructor = validize_mem (constructor);
6969 /* Handle calls that pass values in multiple non-contiguous
6970 locations. The Irix 6 ABI has examples of this. */
6971 if (target == 0 || ! safe_from_p (target, exp, 1)
6972 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6978 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6979 | (TREE_READONLY (exp)
6980 * TYPE_QUAL_CONST))),
6981 0, TREE_ADDRESSABLE (exp), 1);
6984 store_constructor (exp, target, 0, int_expr_size (exp));
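/* Illustrative example (not part of the original source): for
   "struct s x = { 0, 0, 0 };" with X in a BLKmode target, the
   initializer is all-constant and all-zero, so the whole store
   collapses into the single clear_storage call above rather than
   per-field stores or a copy from the constant pool. */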
6989 /* expand_expr: generate code for computing expression EXP.
6990 An rtx for the computed value is returned. The value is never null.
6991 In the case of a void EXP, const0_rtx is returned.
6993 The value may be stored in TARGET if TARGET is nonzero.
6994 TARGET is just a suggestion; callers must assume that
6995 the rtx returned may not be the same as TARGET.
6997 If TARGET is CONST0_RTX, it means that the value will be ignored.
6999 If TMODE is not VOIDmode, it suggests generating the
7000 result in mode TMODE. But this is done only when convenient.
7001 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7002 TMODE is just a suggestion; callers must assume that
7003 the rtx returned may not have mode TMODE.
7005 Note that TARGET may have neither TMODE nor MODE. In that case, it
7006 probably will not be used.
7008 If MODIFIER is EXPAND_SUM then when EXP is an addition
7009 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7010 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7011 products as above, or REG or MEM, or constant.
7012 Ordinarily in such cases we would output mul or add instructions
7013 and then return a pseudo reg containing the sum.
7015 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7016 it also marks a label as absolutely required (it can't be dead).
7017 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7018 This is used for outputting expressions used in initializers.
7020 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7021 with a constant address even if that address is not normally legitimate.
7022 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7024 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7025 a call parameter. Such targets require special care as we haven't yet
7026 marked TARGET so that it's safe from being trashed by libcalls. We
7027 don't want to use TARGET for anything but the final result;
7028 intermediate values must go elsewhere. Additionally, calls to
7029 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7031 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7032 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7033 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7034 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7037 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7038 enum expand_modifier, rtx *);
7041 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7042 enum expand_modifier modifier, rtx *alt_rtl)
7045 rtx ret, last = NULL;
7047 /* Handle ERROR_MARK before anybody tries to access its type. */
7048 if (TREE_CODE (exp) == ERROR_MARK
7049 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7051 ret = CONST0_RTX (tmode);
7052 return ret ? ret : const0_rtx;
7055 if (flag_non_call_exceptions)
7057 rn = lookup_expr_eh_region (exp);
7059 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7061 last = get_last_insn ();
7064 /* If this is an expression of some kind and it has an associated line
7065 number, then emit the line number before expanding the expression.
7067 We need to save and restore the file and line information so that
7068 errors discovered during expansion are emitted with the right
7069 information. It would be better if the diagnostic routines
7070 used the file/line information embedded in the tree nodes rather than globals. */
7072 if (cfun && EXPR_HAS_LOCATION (exp))
7074 location_t saved_location = input_location;
7075 input_location = EXPR_LOCATION (exp);
7076 set_curr_insn_source_location (input_location);
7078 /* Record where the insns produced belong. */
7079 set_curr_insn_block (TREE_BLOCK (exp));
7081 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7083 input_location = saved_location;
7087 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7090 /* If using non-call exceptions, mark all insns that may trap.
7091 expand_call() will mark CALL_INSNs before we get to this code,
7092 but it doesn't handle libcalls, and these may trap. */
7096 for (insn = next_real_insn (last); insn;
7097 insn = next_real_insn (insn))
7099 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7100 /* If we want exceptions for non-call insns, any
7101 may_trap_p instruction may throw. */
7102 && GET_CODE (PATTERN (insn)) != CLOBBER
7103 && GET_CODE (PATTERN (insn)) != USE
7104 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7105 add_reg_note (insn, REG_EH_REGION, GEN_INT (rn));
7113 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7114 enum expand_modifier modifier, rtx *alt_rtl)
7116 rtx op0, op1, op2, temp, decl_rtl;
7119 enum machine_mode mode;
7120 enum tree_code code = TREE_CODE (exp);
7122 rtx subtarget, original_target;
7124 tree context, subexp0, subexp1;
7125 bool reduce_bit_field;
7126 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7127 ? reduce_to_bit_field_precision ((expr), \
7132 type = TREE_TYPE (exp);
7133 mode = TYPE_MODE (type);
7134 unsignedp = TYPE_UNSIGNED (type);
7136 ignore = (target == const0_rtx
7137 || ((code == NOP_EXPR || code == CONVERT_EXPR
7138 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7139 && TREE_CODE (type) == VOID_TYPE));
7141 /* An operation in what may be a bit-field type needs the
7142 result to be reduced to the precision of the bit-field type,
7143 which is narrower than that of the type's mode. */
7144 reduce_bit_field = (!ignore
7145 && TREE_CODE (type) == INTEGER_TYPE
7146 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7148 /* If we are going to ignore this result, we need only do something
7149 if there is a side-effect somewhere in the expression. If there
7150 is, short-circuit the most common cases here. Note that we must
7151 not call expand_expr with anything but const0_rtx in case this
7152 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7156 if (! TREE_SIDE_EFFECTS (exp))
7159 /* Ensure we reference a volatile object even if value is ignored, but
7160 don't do this if all we are doing is taking its address. */
7161 if (TREE_THIS_VOLATILE (exp)
7162 && TREE_CODE (exp) != FUNCTION_DECL
7163 && mode != VOIDmode && mode != BLKmode
7164 && modifier != EXPAND_CONST_ADDRESS)
7166 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7168 temp = copy_to_reg (temp);
7172 if (TREE_CODE_CLASS (code) == tcc_unary
7173 || code == COMPONENT_REF || code == INDIRECT_REF)
7174 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7177 else if (TREE_CODE_CLASS (code) == tcc_binary
7178 || TREE_CODE_CLASS (code) == tcc_comparison
7179 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7181 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7182 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7185 else if (code == BIT_FIELD_REF)
7187 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7188 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7189 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7196 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7199 /* Use subtarget as the target for operand 0 of a binary operation. */
7200 subtarget = get_subtarget (target);
7201 original_target = target;
7207 tree function = decl_function_context (exp);
7209 temp = label_rtx (exp);
7210 temp = gen_rtx_LABEL_REF (Pmode, temp);
7212 if (function != current_function_decl
7214 LABEL_REF_NONLOCAL_P (temp) = 1;
7216 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7221 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7226 /* If a static var's type was incomplete when the decl was written,
7227 but the type is complete now, lay out the decl now. */
7228 if (DECL_SIZE (exp) == 0
7229 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7230 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7231 layout_decl (exp, 0);
7233 /* TLS emulation hook - replace __thread vars with
7234 *__emutls_get_address (&_emutls.var). */
7235 if (! targetm.have_tls
7236 && TREE_CODE (exp) == VAR_DECL
7237 && DECL_THREAD_LOCAL_P (exp))
7239 exp = build_fold_indirect_ref (emutls_var_address (exp));
7240 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7243 /* ... fall through ... */
7247 decl_rtl = DECL_RTL (exp);
7248 gcc_assert (decl_rtl);
7249 decl_rtl = copy_rtx (decl_rtl);
7251 /* Ensure the variable is marked as used even if it doesn't go through
7252 a parser. If it hasn't been used yet, write out an external definition. */
7254 if (! TREE_USED (exp))
7256 assemble_external (exp);
7257 TREE_USED (exp) = 1;
7260 /* Show we haven't gotten RTL for this yet. */
7263 /* Variables inherited from containing functions should have
7264 been lowered by this point. */
7265 context = decl_function_context (exp);
7266 gcc_assert (!context
7267 || context == current_function_decl
7268 || TREE_STATIC (exp)
7269 /* ??? C++ creates functions that are not TREE_STATIC. */
7270 || TREE_CODE (exp) == FUNCTION_DECL);
7272 /* This is the case of an array whose size is to be determined
7273 from its initializer, while the initializer is still being parsed. See expand_decl. */
7276 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7277 temp = validize_mem (decl_rtl);
7279 /* If DECL_RTL is memory, we are in the normal case and the
7280 address is not valid, get the address into a register. */
7282 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7285 *alt_rtl = decl_rtl;
7286 decl_rtl = use_anchored_address (decl_rtl);
7287 if (modifier != EXPAND_CONST_ADDRESS
7288 && modifier != EXPAND_SUM
7289 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
7290 temp = replace_equiv_address (decl_rtl,
7291 copy_rtx (XEXP (decl_rtl, 0)));
7294 /* If we got something, return it. But first, set the alignment
7295 if the address is a register. */
7298 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7299 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7304 /* If the mode of DECL_RTL does not match that of the decl, it
7305 must be a promoted value. We return a SUBREG of the wanted mode,
7306 but mark it so that we know that it was already extended. */
7308 if (REG_P (decl_rtl)
7309 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7311 enum machine_mode pmode;
7313 /* Get the signedness used for this variable. Ensure we get the
7314 same mode we got when the variable was declared. */
7315 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7316 (TREE_CODE (exp) == RESULT_DECL
7317 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7318 gcc_assert (GET_MODE (decl_rtl) == pmode);
7320 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7321 SUBREG_PROMOTED_VAR_P (temp) = 1;
7322 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7329 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7330 TREE_INT_CST_HIGH (exp), mode);
7336 tree tmp = NULL_TREE;
7337 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7338 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7339 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7340 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7341 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7342 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7343 return const_vector_from_tree (exp);
7344 if (GET_MODE_CLASS (mode) == MODE_INT)
7346 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7348 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7351 tmp = build_constructor_from_list (type,
7352 TREE_VECTOR_CST_ELTS (exp));
7353 return expand_expr (tmp, ignore ? const0_rtx : target,
7358 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7361 /* If optimized, generate immediate CONST_DOUBLE
7362 which will be turned into memory by reload if necessary.
7364 We used to force a register so that loop.c could see it. But
7365 this does not allow gen_* patterns to perform optimizations with
7366 the constants. It also produces two insns in cases like "x = 1.0;".
7367 On most machines, floating-point constants are not permitted in
7368 many insns, so we'd end up copying it to a register in any case.
7370 Now, we do the copying in expand_binop, if appropriate. */
7371 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7372 TYPE_MODE (TREE_TYPE (exp)));
7375 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7376 TYPE_MODE (TREE_TYPE (exp)));
7379 /* Handle evaluating a complex constant in a CONCAT target. */
7380 if (original_target && GET_CODE (original_target) == CONCAT)
7382 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7385 rtarg = XEXP (original_target, 0);
7386 itarg = XEXP (original_target, 1);
7388 /* Move the real and imaginary parts separately. */
7389 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7390 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7393 emit_move_insn (rtarg, op0);
7395 emit_move_insn (itarg, op1);
7397 return original_target;
7400 /* ... fall through ... */
7403 temp = expand_expr_constant (exp, 1, modifier);
7405 /* temp contains a constant address.
7406 On RISC machines where a constant address isn't valid,
7407 make some insns to get that address into a register. */
7408 if (modifier != EXPAND_CONST_ADDRESS
7409 && modifier != EXPAND_INITIALIZER
7410 && modifier != EXPAND_SUM
7411 && ! memory_address_p (mode, XEXP (temp, 0)))
7412 return replace_equiv_address (temp,
7413 copy_rtx (XEXP (temp, 0)));
7418 tree val = TREE_OPERAND (exp, 0);
7419 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7421 if (!SAVE_EXPR_RESOLVED_P (exp))
7423 /* We can indeed still hit this case, typically via builtin
7424 expanders calling save_expr immediately before expanding
7425 something. Assume this means that we only have to deal
7426 with non-BLKmode values. */
7427 gcc_assert (GET_MODE (ret) != BLKmode);
7429 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7430 DECL_ARTIFICIAL (val) = 1;
7431 DECL_IGNORED_P (val) = 1;
7432 TREE_OPERAND (exp, 0) = val;
7433 SAVE_EXPR_RESOLVED_P (exp) = 1;
7435 if (!CONSTANT_P (ret))
7436 ret = copy_to_reg (ret);
7437 SET_DECL_RTL (val, ret);
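/* Illustrative example (not part of the original source): for a tree
   such as SAVE_EXPR <i + 1> that is used twice, the first expansion
   copies the computed value into a pseudo and records it in the
   artificial VAR_DECL built above; the second expansion finds
   SAVE_EXPR_RESOLVED_P set and just returns the cached rtl. */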
7444 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7445 expand_goto (TREE_OPERAND (exp, 0));
7447 expand_computed_goto (TREE_OPERAND (exp, 0));
7451 /* If we don't need the result, just ensure we evaluate any subexpressions. */
7455 unsigned HOST_WIDE_INT idx;
7458 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7459 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7464 return expand_constructor (exp, target, modifier, false);
7466 case MISALIGNED_INDIRECT_REF:
7467 case ALIGN_INDIRECT_REF:
7470 tree exp1 = TREE_OPERAND (exp, 0);
7472 if (modifier != EXPAND_WRITE)
7476 t = fold_read_from_constant_string (exp);
7478 return expand_expr (t, target, tmode, modifier);
7481 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7482 op0 = memory_address (mode, op0);
7484 if (code == ALIGN_INDIRECT_REF)
7486 int align = TYPE_ALIGN_UNIT (type);
7487 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7488 op0 = memory_address (mode, op0);
7491 temp = gen_rtx_MEM (mode, op0);
7493 set_mem_attributes (temp, exp, 0);
7495 /* Resolve the misalignment now, so that we don't have to remember
7496 to resolve it later. Of course, this only works for reads. */
7497 /* ??? When we get around to supporting writes, we'll have to handle
7498 this in store_expr directly. The vectorizer isn't generating
7499 those yet, however. */
7500 if (code == MISALIGNED_INDIRECT_REF)
7505 gcc_assert (modifier == EXPAND_NORMAL
7506 || modifier == EXPAND_STACK_PARM);
7508 /* The vectorizer should have already checked the mode. */
7509 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7510 gcc_assert (icode != CODE_FOR_nothing);
7512 /* We've already validated the memory, and we're creating a
7513 new pseudo destination. The predicates really can't fail. */
7514 reg = gen_reg_rtx (mode);
7516 /* Nor can the insn generator. */
7517 insn = GEN_FCN (icode) (reg, temp);
7526 case TARGET_MEM_REF:
7528 struct mem_address addr;
7530 get_address_description (exp, &addr);
7531 op0 = addr_for_mem_ref (&addr, true);
7532 op0 = memory_address (mode, op0);
7533 temp = gen_rtx_MEM (mode, op0);
7534 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7541 tree array = TREE_OPERAND (exp, 0);
7542 tree index = TREE_OPERAND (exp, 1);
7544 /* Fold an expression like: "foo"[2].
7545 This is not done in fold so it won't happen inside &.
7546 Don't fold if this is for wide characters since it's too
7547 difficult to do correctly and this is a very rare case. */
7549 if (modifier != EXPAND_CONST_ADDRESS
7550 && modifier != EXPAND_INITIALIZER
7551 && modifier != EXPAND_MEMORY)
7553 tree t = fold_read_from_constant_string (exp);
7556 return expand_expr (t, target, tmode, modifier);
7559 /* If this is a constant index into a constant array,
7560 just get the value from the array. Handle both the cases when
7561 we have an explicit constructor and when our operand is a variable
7562 that was declared const. */
7564 if (modifier != EXPAND_CONST_ADDRESS
7565 && modifier != EXPAND_INITIALIZER
7566 && modifier != EXPAND_MEMORY
7567 && TREE_CODE (array) == CONSTRUCTOR
7568 && ! TREE_SIDE_EFFECTS (array)
7569 && TREE_CODE (index) == INTEGER_CST)
7571 unsigned HOST_WIDE_INT ix;
7574 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7576 if (tree_int_cst_equal (field, index))
7578 if (!TREE_SIDE_EFFECTS (value))
7579 return expand_expr (fold (value), target, tmode, modifier);
7584 else if (optimize >= 1
7585 && modifier != EXPAND_CONST_ADDRESS
7586 && modifier != EXPAND_INITIALIZER
7587 && modifier != EXPAND_MEMORY
7588 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7589 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7590 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7591 && targetm.binds_local_p (array))
7593 if (TREE_CODE (index) == INTEGER_CST)
7595 tree init = DECL_INITIAL (array);
7597 if (TREE_CODE (init) == CONSTRUCTOR)
7599 unsigned HOST_WIDE_INT ix;
7602 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7604 if (tree_int_cst_equal (field, index))
7606 if (TREE_SIDE_EFFECTS (value))
7609 if (TREE_CODE (value) == CONSTRUCTOR)
7611 /* If VALUE is a CONSTRUCTOR, this
7612 optimization is only useful if
7613 this doesn't store the CONSTRUCTOR
7614 into memory. If it does, it is more
7615 efficient to just load the data from
7616 the array directly. */
7617 rtx ret = expand_constructor (value, target,
7619 if (ret == NULL_RTX)
7623 return expand_expr (fold (value), target, tmode,
7627 else if (TREE_CODE (init) == STRING_CST)
7629 tree index1 = index;
7630 tree low_bound = array_ref_low_bound (exp);
7631 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7633 /* Optimize the special-case of a zero lower bound.
7635 We convert the low_bound to sizetype to avoid some problems
7636 with constant folding. (E.g. suppose the lower bound is 1,
7637 and its mode is QI. Without the conversion, (ARRAY
7638 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7639 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7641 if (! integer_zerop (low_bound))
7642 index1 = size_diffop (index1, fold_convert (sizetype,
7645 if (0 > compare_tree_int (index1,
7646 TREE_STRING_LENGTH (init)))
7648 tree type = TREE_TYPE (TREE_TYPE (init));
7649 enum machine_mode mode = TYPE_MODE (type);
7651 if (GET_MODE_CLASS (mode) == MODE_INT
7652 && GET_MODE_SIZE (mode) == 1)
7653 return gen_int_mode (TREE_STRING_POINTER (init)
7654 [TREE_INT_CST_LOW (index1)],
7661 goto normal_inner_ref;
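/* Illustrative example (not part of the original source): with
   optimization enabled, "static const char s[] = "abc";" lets s[1]
   fold here to gen_int_mode ('b', QImode) without touching memory,
   because S binds locally, is read-only, and its DECL_INITIAL is the
   STRING_CST. */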
7664 /* If the operand is a CONSTRUCTOR, we can just extract the
7665 appropriate field if it is present. */
7666 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7668 unsigned HOST_WIDE_INT idx;
7671 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7673 if (field == TREE_OPERAND (exp, 1)
7674 /* We can normally use the value of the field in the
7675 CONSTRUCTOR. However, if this is a bitfield in
7676 an integral mode that we can fit in a HOST_WIDE_INT,
7677 we must mask only the number of bits in the bitfield,
7678 since this is done implicitly by the constructor. If
7679 the bitfield does not meet either of those conditions,
7680 we can't do this optimization. */
7681 && (! DECL_BIT_FIELD (field)
7682 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7683 && (GET_MODE_BITSIZE (DECL_MODE (field))
7684 <= HOST_BITS_PER_WIDE_INT))))
7686 if (DECL_BIT_FIELD (field)
7687 && modifier == EXPAND_STACK_PARM)
7689 op0 = expand_expr (value, target, tmode, modifier);
7690 if (DECL_BIT_FIELD (field))
7692 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7693 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7695 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7697 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7698 op0 = expand_and (imode, op0, op1, target);
7703 = build_int_cst (NULL_TREE,
7704 GET_MODE_BITSIZE (imode) - bitsize);
7706 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7708 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7716 goto normal_inner_ref;
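/* Illustrative example (not part of the original source): reading
   field F from a constructor for "struct { int f : 3; }" initialized
   with { 5 } takes the value 5 directly from the CONSTRUCTOR; the
   bit-field code above then masks with (1 << 3) - 1 if F is
   unsigned, or shifts left and then right by
   GET_MODE_BITSIZE (SImode) - 3 to sign-extend it if F is signed. */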
7719 case ARRAY_RANGE_REF:
7722 enum machine_mode mode1;
7723 HOST_WIDE_INT bitsize, bitpos;
7726 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7727 &mode1, &unsignedp, &volatilep, true);
7730 /* If we got back the original object, something is wrong. Perhaps
7731 we are evaluating an expression too early. In any event, don't
7732 infinitely recurse. */
7733 gcc_assert (tem != exp);
7735 /* If TEM's type is a union of variable size, pass TARGET to the inner
7736 computation, since it will need a temporary and TARGET is known
7737 to be able to hold it. This occurs in unchecked conversion in Ada. */
7741 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7742 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7744 && modifier != EXPAND_STACK_PARM
7745 ? target : NULL_RTX),
7747 (modifier == EXPAND_INITIALIZER
7748 || modifier == EXPAND_CONST_ADDRESS
7749 || modifier == EXPAND_STACK_PARM)
7750 ? modifier : EXPAND_NORMAL);
7752 /* If this is a constant, put it into a register if it is a legitimate
7753 constant, OFFSET is 0, and we won't try to extract outside the
7754 register (in case we were passed a partially uninitialized object
7755 or a view_conversion to a larger size) or a BLKmode piece of it
7756 (e.g. if it is unchecked-converted to a record type in Ada). Force
7757 the constant to memory otherwise. */
7758 if (CONSTANT_P (op0))
7760 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7761 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7764 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7765 op0 = force_reg (mode, op0);
7767 op0 = validize_mem (force_const_mem (mode, op0));
7770 /* Otherwise, if this object not in memory and we either have an
7771 offset, a BLKmode result, or a reference outside the object, put it
7772 there. Such cases can occur in Ada if we have unchecked conversion
7773 of an expression from a scalar type to an array or record type or
7774 for an ARRAY_RANGE_REF whose type is BLKmode. */
7775 else if (!MEM_P (op0)
7778 || (bitpos + bitsize
7779 > GET_MODE_BITSIZE (GET_MODE (op0)))))
7781 tree nt = build_qualified_type (TREE_TYPE (tem),
7782 (TYPE_QUALS (TREE_TYPE (tem))
7783 | TYPE_QUAL_CONST));
7784 rtx memloc = assign_temp (nt, 1, 1, 1);
7786 emit_move_insn (memloc, op0);
7792 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7795 gcc_assert (MEM_P (op0));
7797 #ifdef POINTERS_EXTEND_UNSIGNED
7798 if (GET_MODE (offset_rtx) != Pmode)
7799 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7801 if (GET_MODE (offset_rtx) != ptr_mode)
7802 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7805 if (GET_MODE (op0) == BLKmode
7806 /* A constant address in OP0 can have VOIDmode; we must
7807 not try to call force_reg in that case. */
7808 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7810 && (bitpos % bitsize) == 0
7811 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7812 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7814 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7818 op0 = offset_address (op0, offset_rtx,
7819 highest_pow2_factor (offset));
7822 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7823 record its alignment as BIGGEST_ALIGNMENT. */
7824 if (MEM_P (op0) && bitpos == 0 && offset != 0
7825 && is_aligning_offset (offset, tem))
7826 set_mem_align (op0, BIGGEST_ALIGNMENT);
7828 /* Don't forget about volatility even if this is a bitfield. */
7829 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7831 if (op0 == orig_op0)
7832 op0 = copy_rtx (op0);
7834 MEM_VOLATILE_P (op0) = 1;
7837 /* The following code doesn't handle CONCAT.
7838 Assume only bitpos == 0 can be used for CONCAT, due to
7839 one-element arrays having the same mode as their element. */
7840 if (GET_CODE (op0) == CONCAT)
7842 gcc_assert (bitpos == 0
7843 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7847 /* In cases where an aligned union has an unaligned object
7848 as a field, we might be extracting a BLKmode value from
7849 an integer-mode (e.g., SImode) object. Handle this case
7850 by doing the extract into an object as wide as the field
7851 (which we know to be the width of a basic mode), then
7852 storing into memory, and changing the mode to BLKmode. */
7853 if (mode1 == VOIDmode
7854 || REG_P (op0) || GET_CODE (op0) == SUBREG
7855 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7856 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7857 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7858 && modifier != EXPAND_CONST_ADDRESS
7859 && modifier != EXPAND_INITIALIZER)
7860 /* If the field isn't aligned enough to fetch as a memref,
7861 fetch it as a bit field. */
7862 || (mode1 != BLKmode
7863 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7864 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7866 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7867 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7868 && ((modifier == EXPAND_CONST_ADDRESS
7869 || modifier == EXPAND_INITIALIZER)
7871 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7872 || (bitpos % BITS_PER_UNIT != 0)))
7873 /* If the type and the field are a constant size and the
7874 size of the type isn't the same size as the bitfield,
7875 we must use bitfield operations. */
7877 && TYPE_SIZE (TREE_TYPE (exp))
7878 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7879 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7882 enum machine_mode ext_mode = mode;
7884 if (ext_mode == BLKmode
7885 && ! (target != 0 && MEM_P (op0)
7887 && bitpos % BITS_PER_UNIT == 0))
7888 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7890 if (ext_mode == BLKmode)
7893 target = assign_temp (type, 0, 1, 1);
7898 /* In this case, BITPOS must start at a byte boundary and
7899 TARGET, if specified, must be a MEM. */
7900 gcc_assert (MEM_P (op0)
7901 && (!target || MEM_P (target))
7902 && !(bitpos % BITS_PER_UNIT));
7904 emit_block_move (target,
7905 adjust_address (op0, VOIDmode,
7906 bitpos / BITS_PER_UNIT),
7907 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7909 (modifier == EXPAND_STACK_PARM
7910 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7915 op0 = validize_mem (op0);
7917 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7918 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7920 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7921 (modifier == EXPAND_STACK_PARM
7922 ? NULL_RTX : target),
7923 ext_mode, ext_mode);
7925 /* If the result is a record type and BITSIZE is narrower than
7926 the mode of OP0, an integral mode, and this is a big endian
7927 machine, we must put the field into the high-order bits. */
7928 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7929 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7930 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7931 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7932 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7936 /* If the result type is BLKmode, store the data into a temporary
7937 of the appropriate type, but with the mode corresponding to the
7938 mode for the data we have (op0's mode). It's tempting to make
7939 this a constant type, since we know it's only being stored once,
7940 but that can cause problems if we are taking the address of this
7941 COMPONENT_REF because the MEM of any reference via that address
7942 will have flags corresponding to the type, which will not
7943 necessarily be constant. */
7944 if (mode == BLKmode)
7946 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7949 /* If the reference doesn't use the alias set of its type,
7950 we cannot create the temporary using that type. */
7951 if (component_uses_parent_alias_set (exp))
7953 new_rtx = assign_stack_local (ext_mode, size, 0);
7954 set_mem_alias_set (new_rtx, get_alias_set (exp));
7957 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
7959 emit_move_insn (new_rtx, op0);
7960 op0 = copy_rtx (new_rtx);
7961 PUT_MODE (op0, BLKmode);
7962 set_mem_attributes (op0, exp, 1);
7968 /* If the result is BLKmode, use that to access the object now as well. */
7970 if (mode == BLKmode)
7973 /* Get a reference to just this component. */
7974 if (modifier == EXPAND_CONST_ADDRESS
7975 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7976 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7978 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7980 if (op0 == orig_op0)
7981 op0 = copy_rtx (op0);
7983 set_mem_attributes (op0, exp, 0);
7984 if (REG_P (XEXP (op0, 0)))
7985 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7987 MEM_VOLATILE_P (op0) |= volatilep;
7988 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7989 || modifier == EXPAND_CONST_ADDRESS
7990 || modifier == EXPAND_INITIALIZER)
7992 else if (target == 0)
7993 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7995 convert_move (target, op0, unsignedp);
8000 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8003 /* All valid uses of __builtin_va_arg_pack () are removed during inlining. */
8005 if (CALL_EXPR_VA_ARG_PACK (exp))
8006 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8008 tree fndecl = get_callee_fndecl (exp), attr;
8011 && (attr = lookup_attribute ("error",
8012 DECL_ATTRIBUTES (fndecl))) != NULL)
8013 error ("%Kcall to %qs declared with attribute error: %s",
8014 exp, lang_hooks.decl_printable_name (fndecl, 1),
8015 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8017 && (attr = lookup_attribute ("warning",
8018 DECL_ATTRIBUTES (fndecl))) != NULL)
8019 warning (0, "%Kcall to %qs declared with attribute warning: %s",
8020 exp, lang_hooks.decl_printable_name (fndecl, 1),
8021 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8023 /* Check for a built-in function. */
8024 if (fndecl && DECL_BUILT_IN (fndecl))
8026 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
8027 return lang_hooks.expand_expr (exp, original_target,
8028 tmode, modifier, alt_rtl);
8030 return expand_builtin (exp, target, subtarget, tmode, ignore);
8033 return expand_call (exp, target, ignore);
8037 if (TREE_OPERAND (exp, 0) == error_mark_node)
8040 if (TREE_CODE (type) == UNION_TYPE)
8042 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8044 /* If both input and output are BLKmode, this conversion isn't doing
8045 anything except possibly changing memory attributes. */
8046 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8048 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8051 result = copy_rtx (result);
8052 set_mem_attributes (result, exp, 0);
8058 if (TYPE_MODE (type) != BLKmode)
8059 target = gen_reg_rtx (TYPE_MODE (type));
8061 target = assign_temp (type, 0, 1, 1);
8065 /* Store data into beginning of memory target. */
8066 store_expr (TREE_OPERAND (exp, 0),
8067 adjust_address (target, TYPE_MODE (valtype), 0),
8068 modifier == EXPAND_STACK_PARM,
8073 gcc_assert (REG_P (target));
8075 /* Store this field into a union of the proper type. */
8076 store_field (target,
8077 MIN ((int_size_in_bytes (TREE_TYPE
8078 (TREE_OPERAND (exp, 0)))
8080 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8081 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8085 /* Return the entire union. */
8089 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8091 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8094 /* If the signedness of the conversion differs and OP0 is
8095 a promoted SUBREG, clear that indication since we now
8096 have to do the proper extension. */
8097 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8098 && GET_CODE (op0) == SUBREG)
8099 SUBREG_PROMOTED_VAR_P (op0) = 0;
8101 return REDUCE_BIT_FIELD (op0);
8104 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8105 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8106 if (GET_MODE (op0) == mode)
8109 /* If OP0 is a constant, just convert it into the proper mode. */
8110 else if (CONSTANT_P (op0))
8112 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8113 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8115 if (modifier == EXPAND_INITIALIZER)
8116 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8117 subreg_lowpart_offset (mode,
8120 op0 = convert_modes (mode, inner_mode, op0,
8121 TYPE_UNSIGNED (inner_type));
8124 else if (modifier == EXPAND_INITIALIZER)
8125 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8127 else if (target == 0)
8128 op0 = convert_to_mode (mode, op0,
8129 TYPE_UNSIGNED (TREE_TYPE
8130 (TREE_OPERAND (exp, 0))));
8133 convert_move (target, op0,
8134 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8138 return REDUCE_BIT_FIELD (op0);
8140 case VIEW_CONVERT_EXPR:
8141 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8143 /* If the input and output modes are both the same, we are done. */
8144 if (TYPE_MODE (type) == GET_MODE (op0))
8146 /* If neither mode is BLKmode, and both modes are the same size,
8147 then we can use gen_lowpart. */
8148 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8149 && GET_MODE_SIZE (TYPE_MODE (type))
8150 == GET_MODE_SIZE (GET_MODE (op0)))
8152 if (GET_CODE (op0) == SUBREG)
8153 op0 = force_reg (GET_MODE (op0), op0);
8154 op0 = gen_lowpart (TYPE_MODE (type), op0);
8156 /* If both modes are integral, then we can convert from one to the other. */
8158 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8159 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8160 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8161 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8162 /* As a last resort, spill op0 to memory, and reload it in a different mode. */
8164 else if (!MEM_P (op0))
8166 /* If the operand is not a MEM, force it into memory. Since we
8167 are going to be changing the mode of the MEM, don't call
8168 force_const_mem for constants because we don't allow pool
8169 constants to change mode. */
8170 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8172 gcc_assert (!TREE_ADDRESSABLE (exp));
8174 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8176 = assign_stack_temp_for_type
8177 (TYPE_MODE (inner_type),
8178 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8180 emit_move_insn (target, op0);
8184 /* At this point, OP0 is in the correct mode. If the output type is such
8185 that the operand is known to be aligned, indicate that it is.
8186 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
8190 op0 = copy_rtx (op0);
8192 if (TYPE_ALIGN_OK (type))
8193 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8194 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8195 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8197 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8198 HOST_WIDE_INT temp_size
8199 = MAX (int_size_in_bytes (inner_type),
8200 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8201 rtx new_rtx = assign_stack_temp_for_type (TYPE_MODE (type),
8202 temp_size, 0, type);
8203 rtx new_with_op0_mode = adjust_address (new_rtx, GET_MODE (op0), 0);
8205 gcc_assert (!TREE_ADDRESSABLE (exp));
8207 if (GET_MODE (op0) == BLKmode)
8208 emit_block_move (new_with_op0_mode, op0,
8209 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8210 (modifier == EXPAND_STACK_PARM
8211 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8213 emit_move_insn (new_with_op0_mode, op0);
8218 op0 = adjust_address (op0, TYPE_MODE (type), 0);
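/* Illustrative example (not part of the original source): for
   VIEW_CONVERT_EXPR<int>(f) where F is a float variable, SFmode and
   SImode have the same size, so the gen_lowpart branch above simply
   reinterprets the bits; only when no mode-level reinterpretation is
   possible is OP0 spilled to a stack temporary and re-read in the
   new mode. */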
8223 case POINTER_PLUS_EXPR:
8224 /* Even though the sizetype mode and the pointer's mode can be different,
8225 expand is able to handle this correctly and get the correct result out
8226 of the PLUS_EXPR code. */
8229 /* Check if this is a case for multiplication and addition. */
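      /* E.g. "acc + (long long) i * (long long) j" with SImode I and J
	 and a DImode result can map to a widening multiply-accumulate
	 pattern ([us]madd_widen_optab) when the target provides one,
	 so the extensions never have to be emitted explicitly.  */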
8230 if ((TREE_CODE (type) == INTEGER_TYPE
8231 || TREE_CODE (type) == FIXED_POINT_TYPE)
8232 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8234 tree subsubexp0, subsubexp1;
8235 enum tree_code code0, code1, this_code;
8237 subexp0 = TREE_OPERAND (exp, 0);
8238 subsubexp0 = TREE_OPERAND (subexp0, 0);
8239 subsubexp1 = TREE_OPERAND (subexp0, 1);
8240 code0 = TREE_CODE (subsubexp0);
8241 code1 = TREE_CODE (subsubexp1);
8242 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8243 : FIXED_CONVERT_EXPR;
8244 if (code0 == this_code && code1 == this_code
8245 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8246 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8247 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8248 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8249 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8250 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8252 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8253 enum machine_mode innermode = TYPE_MODE (op0type);
8254 bool zextend_p = TYPE_UNSIGNED (op0type);
8255 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
	      if (sat_p == 0)
		this_optab = zextend_p ? umadd_widen_optab
				       : smadd_widen_optab;
	      else
		this_optab = zextend_p ? usmadd_widen_optab
				       : ssmadd_widen_optab;
8261 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8262 && (optab_handler (this_optab, mode)->insn_code
8263 != CODE_FOR_nothing))
8265 expand_operands (TREE_OPERAND (subsubexp0, 0),
8266 TREE_OPERAND (subsubexp1, 0),
8267 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8268 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8269 VOIDmode, EXPAND_NORMAL);
		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
					    target, unsignedp);
		  gcc_assert (temp);
8273 return REDUCE_BIT_FIELD (temp);
8278 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8279 something else, make sure we add the register to the constant and
8280 then to the other thing. This case can occur during strength
8281 reduction and doing it this way will produce better code if the
8282 frame pointer or argument pointer is eliminated.
8284 fold-const.c will ensure that the constant is always in the inner
8285 PLUS_EXPR, so the only case we need to do anything about is if
8286 sp, ap, or fp is our second argument, in which case we must swap
8287 the innermost first argument and our second argument. */
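    /* E.g. (x + 4) + fp is rewritten here as (fp + 4) + x, keeping the
       eliminable register next to the constant.  */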
8289 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8290 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8291 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8292 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8293 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8294 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8296 tree t = TREE_OPERAND (exp, 1);
8298 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8299 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8302 /* If the result is to be ptr_mode and we are adding an integer to
8303 something, we might be forming a constant. So try to use
8304 plus_constant. If it produces a sum and we can't accept it,
8305 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.
8309 If this is an EXPAND_SUM call, always return the sum. */
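      /* E.g. "P = &arr[2]" with 4-byte elements can then collapse to the
	 single RTL constant (const (plus (symbol_ref arr) (const_int 8))).  */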
8310 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8311 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
8315 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8316 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8317 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
8323 /* Use immed_double_const to ensure that the constant is
8324 truncated according to the mode of OP1, then sign extended
8325 to a HOST_WIDE_INT. Using the constant directly can result
8326 in non-canonical RTL in a 64x32 cross compile. */
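	      /* E.g. with a 64-bit HOST_WIDE_INT and a 32-bit target, the
		 low word of an unsigned 0xffffffff constant is not a
		 canonical SImode CONST_INT; truncating and re-extending
		 yields the canonical (const_int -1).  */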
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8331 op1 = plus_constant (op1, INTVAL (constant_part));
8332 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8333 op1 = force_operand (op1, target);
8334 return REDUCE_BIT_FIELD (op1);
8337 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8338 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8339 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8344 (modifier == EXPAND_INITIALIZER
8345 ? EXPAND_INITIALIZER : EXPAND_SUM));
8346 if (! CONSTANT_P (op0))
8348 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8349 VOIDmode, modifier);
8350 /* Return a PLUS if modifier says it's OK. */
8351 if (modifier == EXPAND_SUM
8352 || modifier == EXPAND_INITIALIZER)
8353 return simplify_gen_binary (PLUS, mode, op0, op1);
8356 /* Use immed_double_const to ensure that the constant is
8357 truncated according to the mode of OP1, then sign extended
8358 to a HOST_WIDE_INT. Using the constant directly can result
8359 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8364 op0 = plus_constant (op0, INTVAL (constant_part));
8365 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8366 op0 = force_operand (op0, target);
8367 return REDUCE_BIT_FIELD (op0);
8371 /* No sense saving up arithmetic to be done
8372 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8375 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8376 || mode != ptr_mode)
8378 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8379 subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
8387 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8388 subtarget, &op0, &op1, modifier);
8389 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
    case MINUS_EXPR:
      /* Check if this is a case for multiplication and subtraction.  */
8393 if ((TREE_CODE (type) == INTEGER_TYPE
8394 || TREE_CODE (type) == FIXED_POINT_TYPE)
8395 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8397 tree subsubexp0, subsubexp1;
8398 enum tree_code code0, code1, this_code;
8400 subexp1 = TREE_OPERAND (exp, 1);
8401 subsubexp0 = TREE_OPERAND (subexp1, 0);
8402 subsubexp1 = TREE_OPERAND (subexp1, 1);
8403 code0 = TREE_CODE (subsubexp0);
8404 code1 = TREE_CODE (subsubexp1);
8405 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8406 : FIXED_CONVERT_EXPR;
8407 if (code0 == this_code && code1 == this_code
8408 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8409 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8410 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8411 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8412 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8413 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8415 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8416 enum machine_mode innermode = TYPE_MODE (op0type);
8417 bool zextend_p = TYPE_UNSIGNED (op0type);
8418 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
	      if (sat_p == 0)
		this_optab = zextend_p ? umsub_widen_optab
				       : smsub_widen_optab;
	      else
		this_optab = zextend_p ? usmsub_widen_optab
				       : ssmsub_widen_optab;
8424 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8425 && (optab_handler (this_optab, mode)->insn_code
8426 != CODE_FOR_nothing))
8428 expand_operands (TREE_OPERAND (subsubexp0, 0),
8429 TREE_OPERAND (subsubexp1, 0),
8430 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8431 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8432 VOIDmode, EXPAND_NORMAL);
		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
					    target, unsignedp);
		  gcc_assert (temp);
8436 return REDUCE_BIT_FIELD (temp);
8441 /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are CONST_INT.  */
8444 /* Handle difference of two symbolic constants,
8445 for the sake of an initializer. */
8446 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8447 && really_constant_p (TREE_OPERAND (exp, 0))
8448 && really_constant_p (TREE_OPERAND (exp, 1)))
8450 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8451 NULL_RTX, &op0, &op1, modifier);
8453 /* If the last operand is a CONST_INT, use plus_constant of
8454 the negated constant. Else make the MINUS. */
8455 if (GET_CODE (op1) == CONST_INT)
8456 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8458 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8461 /* No sense saving up arithmetic to be done
8462 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
8465 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8466 || mode != ptr_mode)
8469 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8470 subtarget, &op0, &op1, modifier);
8472 /* Convert A - const to A + (-const). */
8473 if (GET_CODE (op1) == CONST_INT)
8475 op1 = negate_rtx (mode, op1);
8476 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
    case MULT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_mult" doesn't support sat/no-sat fixed-point
	 multiplications.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;
8488 /* If first operand is constant, swap them.
8489 Thus the following special case checks need only
8490 check the second operand. */
8491 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8493 tree t1 = TREE_OPERAND (exp, 0);
8494 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8495 TREE_OPERAND (exp, 1) = t1;
8498 /* Attempt to return something suitable for generating an
8499 indexed address, for machines that support that. */
8501 if (modifier == EXPAND_SUM && mode == ptr_mode
8502 && host_integerp (TREE_OPERAND (exp, 1), 0))
8504 tree exp1 = TREE_OPERAND (exp, 1);
8506 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);
8514 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8515 gen_int_mode (tree_low_cst (exp1, 0),
8516 TYPE_MODE (TREE_TYPE (exp1)))));
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8522 /* Check for multiplying things that have been extended
8523 from a narrower type. If this machine supports multiplying
8524 in that narrower type with a result in the desired type,
8525 do it that way, and avoid the explicit type-conversion. */
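      /* E.g. "(int) (short) a * (int) (short) b" can be emitted as a
	 single HImode x HImode -> SImode widening multiply when the
	 target implements the corresponding [su]mul_widen_optab.  */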
8527 subexp0 = TREE_OPERAND (exp, 0);
8528 subexp1 = TREE_OPERAND (exp, 1);
8529 /* First, check if we have a multiplication of one signed and one
8530 unsigned operand. */
8531 if (TREE_CODE (subexp0) == NOP_EXPR
8532 && TREE_CODE (subexp1) == NOP_EXPR
8533 && TREE_CODE (type) == INTEGER_TYPE
8534 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8535 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8536 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8537 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8538 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8539 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8541 enum machine_mode innermode
8542 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8543 this_optab = usmul_widen_optab;
8544 if (mode == GET_MODE_WIDER_MODE (innermode))
8546 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8548 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8549 expand_operands (TREE_OPERAND (subexp0, 0),
8550 TREE_OPERAND (subexp1, 0),
8551 NULL_RTX, &op0, &op1, 0);
	      else
		expand_operands (TREE_OPERAND (subexp0, 0),
8554 TREE_OPERAND (subexp1, 0),
				 NULL_RTX, &op1, &op0, 0);
	      goto binop3;
8561 /* Check for a multiplication with matching signedness. */
8562 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8563 && TREE_CODE (type) == INTEGER_TYPE
8564 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8565 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8566 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8567 && int_fits_type_p (TREE_OPERAND (exp, 1),
8568 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8569 /* Don't use a widening multiply if a shift will do. */
8570 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8571 > HOST_BITS_PER_WIDE_INT)
8572 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	       || (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8575 && (TYPE_PRECISION (TREE_TYPE
8576 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		    == TYPE_PRECISION (TREE_TYPE
				       (TREE_OPERAND
					(TREE_OPERAND (exp, 0), 0))))
8580 /* If both operands are extended, they must either both
8581 be zero-extended or both be sign-extended. */
8582 && (TYPE_UNSIGNED (TREE_TYPE
8583 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		    == TYPE_UNSIGNED (TREE_TYPE
				      (TREE_OPERAND
				       (TREE_OPERAND (exp, 0), 0)))))
8588 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8589 enum machine_mode innermode = TYPE_MODE (op0type);
8590 bool zextend_p = TYPE_UNSIGNED (op0type);
8591 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8592 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8594 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8596 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8598 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8599 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8600 TREE_OPERAND (exp, 1),
8601 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	      else
		expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8604 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
	      goto binop3;
8608 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8609 && innermode == word_mode)
	      {
		rtx htem, hipart;

		op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8613 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		  op1 = convert_modes (innermode, mode,
				       expand_normal (TREE_OPERAND (exp, 1)),
				       unsignedp);
		else
		  op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8619 temp = expand_binop (mode, other_optab, op0, op1, target,
8620 unsignedp, OPTAB_LIB_WIDEN);
8621 hipart = gen_highpart (innermode, temp);
		htem = expand_mult_highpart_adjust (innermode, hipart,
						    op0, op1, hipart,
						    zextend_p);
		if (htem != hipart)
		  emit_move_insn (hipart, htem);
		return REDUCE_BIT_FIELD (temp);
	      }
8631 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8632 subtarget, &op0, &op1, 0);
8633 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8635 case TRUNC_DIV_EXPR:
8636 case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
8639 case EXACT_DIV_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
	 divisions.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8648 /* Possible optimization: compute the dividend with EXPAND_SUM
8649 then if the divisor is constant can optimize the case
8650 where some terms of the dividend have coeffs divisible by it. */
8651 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8652 subtarget, &op0, &op1, 0);
8653 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8658 case TRUNC_MOD_EXPR:
8659 case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8664 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8665 subtarget, &op0, &op1, 0);
8666 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8668 case FIXED_CONVERT_EXPR:
8669 op0 = expand_normal (TREE_OPERAND (exp, 0));
8670 if (target == 0 || modifier == EXPAND_STACK_PARM)
8671 target = gen_reg_rtx (mode);
8673 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8674 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8675 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8676 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
      else
	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
      return target;
8681 case FIX_TRUNC_EXPR:
8682 op0 = expand_normal (TREE_OPERAND (exp, 0));
8683 if (target == 0 || modifier == EXPAND_STACK_PARM)
8684 target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
8689 op0 = expand_normal (TREE_OPERAND (exp, 0));
8690 if (target == 0 || modifier == EXPAND_STACK_PARM)
8691 target = gen_reg_rtx (mode);
8692 /* expand_float can't figure out what to do if FROM has VOIDmode.
8693 So give it the correct mode. With -O, cse will optimize this. */
8694 if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
8697 expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
8702 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8703 VOIDmode, EXPAND_NORMAL);
8704 if (modifier == EXPAND_STACK_PARM)
8706 temp = expand_unop (mode,
			  optab_for_tree_code (NEGATE_EXPR, type,
					       optab_default),
			  op0, target, 0);
      gcc_assert (temp);
8711 return REDUCE_BIT_FIELD (temp);
    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8715 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8719 /* ABS_EXPR is not valid for complex arguments. */
8720 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8721 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8723 /* Unsigned abs is simply the operand. Testing here means we don't
8724 risk generating incorrect code below. */
      if (TYPE_UNSIGNED (type))
	return op0;
8728 return expand_abs (mode, op0, target, unsignedp,
8729 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MIN_EXPR:
    case MAX_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
8736 || (MEM_P (target) && MEM_VOLATILE_P (target))
8737 || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8740 target = gen_reg_rtx (mode);
8741 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8742 target, &op0, &op1, 0);
8744 /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
8747 this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;
      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
8756 if (! REG_P (target))
8757 target = gen_reg_rtx (mode);
8759 /* If op1 was placed in target, swap op0 and op1. */
      if (target != op0 && target == op1)
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}
8767 /* We generate better code and avoid problems with op1 mentioning
8768 target by forcing op1 into a pseudo if it isn't a constant. */
8769 if (! CONSTANT_P (op1))
8770 op1 = force_reg (mode, op1);
	enum rtx_code comparison_code;
	rtx cmpop1 = op1;
8776 if (code == MAX_EXPR)
8777 comparison_code = unsignedp ? GEU : GE;
	else
	  comparison_code = unsignedp ? LEU : LE;
8781 /* Canonicalize to comparisons against 0. */
8782 if (op1 == const1_rtx)
8784 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8785 or (a != 0 ? a : 1) for unsigned.
8786 For MIN we are safe converting (a <= 1 ? a : 1)
8787 into (a <= 0 ? a : 1) */
8788 cmpop1 = const0_rtx;
8789 if (code == MAX_EXPR)
8790 comparison_code = unsignedp ? NE : GT;
8792 if (op1 == constm1_rtx && !unsignedp)
8794 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8795 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8796 cmpop1 = const0_rtx;
8797 if (code == MIN_EXPR)
8798 comparison_code = LT;
8800 #ifdef HAVE_conditional_move
8801 /* Use a conditional move if possible. */
	if (can_conditionally_move_p (mode))
	  {
	    rtx insn;
8806 /* ??? Same problem as in expmed.c: emit_conditional_move
8807 forces a stack adjustment via compare_from_rtx, and we
8808 lose the stack adjustment if the sequence we are about
8809 to create is discarded. */
8810 do_pending_stack_adjust ();
8814 /* Try to emit the conditional move. */
	    insn = emit_conditional_move (target, comparison_code,
					  op0, cmpop1, mode,
					  op0, op1, mode,
					  unsignedp);
	    /* If we could do the conditional move, emit the sequence,
	       and return.  */
	    if (insn != NULL_RTX)
	      {
		rtx seq = get_insns ();
		end_sequence ();
		emit_insn (seq);
		return target;
	      }
	    /* Otherwise discard the sequence and fall back to code with
	       branches.  */
	    end_sequence ();
	  }
#endif
	if (target != op0)
	  emit_move_insn (target, op0);
8838 temp = gen_label_rtx ();
8839 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8840 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
	emit_move_insn (target, op1);
	emit_label (temp);
	return target;

    case BIT_NOT_EXPR:
8847 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8848 VOIDmode, EXPAND_NORMAL);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      gcc_assert (temp);
      return temp;
8855 /* ??? Can optimize bitwise operations with one arg constant.
8856 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8857 and (a bitwise1 b) bitwise2 b (etc)
8858 but that is probably not worth while. */
8860 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8861 boolean values when we want in all cases to compute both of them. In
8862 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8863 as actual zero-or-1 values and then bitwise anding. In cases where
8864 there cannot be any side effects, better code would be made by
8865 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8866 how to recognize those cases. */
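  /* E.g. "x > 0 && y > 0" with no side effects can be computed as two
     store-flag results bitwise-ANDed together, with no branch at all.  */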
    case TRUTH_AND_EXPR:
      code = BIT_AND_EXPR;
    case BIT_AND_EXPR:
      goto binop;

    case TRUTH_OR_EXPR:
      code = BIT_IOR_EXPR;
    case BIT_IOR_EXPR:
      goto binop;

    case TRUTH_XOR_EXPR:
      code = BIT_XOR_EXPR;
    case BIT_XOR_EXPR:
      goto binop;
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8886 || (GET_MODE_PRECISION (TYPE_MODE (type))
8887 == TYPE_PRECISION (type)));
      /* Fall through.  */

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      /* If this is a fixed-point operation, then we cannot use the code
	 below because "expand_shift" doesn't support sat/no-sat fixed-point
	 shifts.  */
      if (ALL_FIXED_POINT_MODE_P (mode))
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
8902 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8903 VOIDmode, EXPAND_NORMAL);
      temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
8906 if (code == LSHIFT_EXPR)
	temp = REDUCE_BIT_FIELD (temp);
      return temp;
8910 /* Could determine the answer when only additive constants differ. Also,
8911 the addition of one can be handled by changing the condition. */
8918 case UNORDERED_EXPR:
8926 temp = do_store_flag (exp,
8927 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;
8932 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8933 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
8936 && (GET_MODE (original_target)
8937 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8939 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8940 VOIDmode, EXPAND_NORMAL);
8942 /* If temp is constant, we can just compute the result. */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }
	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }
8962 op1 = gen_label_rtx ();
8963 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8964 GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
8970 /* If no set-flag instruction, must generate a conditional store
	 into a temporary variable.  Drop through and handle this
	 like && and ||.  */
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
8977 || ! safe_from_p (target, exp, 1)
8978 /* Make sure we don't have a hard reg (such as function's return
8979 value) live across basic blocks, if not optimizing. */
8980 || (!optimize && REG_P (target)
8981 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8982 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
      if (target)
	emit_move_insn (target, const0_rtx);
8987 op1 = gen_label_rtx ();
8988 jumpifnot (exp, op1);
      if (target)
	emit_move_insn (target, const1_rtx);

      emit_label (op1);
8994 return ignore ? const0_rtx : target;
8996 case TRUTH_NOT_EXPR:
8997 if (modifier == EXPAND_STACK_PARM)
8999 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
9000 VOIDmode, EXPAND_NORMAL);
9001 /* The parser is careful to generate TRUTH_NOT_EXPR
9002 only with operands that are always zero or one. */
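      /* Since op0 is known to be 0 or 1, "op0 ^ 1" computes the logical
	 negation directly; no comparison instruction is needed.  */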
9003 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      gcc_assert (temp);
      return temp;
9008 case STATEMENT_LIST:
      {
	tree_stmt_iterator iter;
9012 gcc_assert (ignore);
9014 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
      }
      return const0_rtx;

    case COND_EXPR:
9020 /* A COND_EXPR with its type being VOID_TYPE represents a
9021 conditional jump and is handled in
9022 expand_gimple_cond_expr. */
9023 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9025 /* Note that COND_EXPRs whose type is a structure or union
9026 are required to be constructed to contain assignments of
9027 a temporary variable, so that we can evaluate them here
9028 for side effect only. If type is void, we must do likewise. */
9030 gcc_assert (!TREE_ADDRESSABLE (type)
		  && ! ignore
		  && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9033 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9035 /* If we are not to produce a result, we have no target. Otherwise,
9036 if a target was specified use it; it will not be used as an
	 intermediate target unless it is safe.  If no target, use a
	 temporary.  */
9040 if (modifier != EXPAND_STACK_PARM
	  && original_target
	  && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9043 && GET_MODE (original_target) == mode
9044 #ifdef HAVE_conditional_move
9045 && (! can_conditionally_move_p (mode)
9046 || REG_P (original_target))
#endif
	  && !MEM_P (original_target))
9049 temp = original_target;
      else
	temp = assign_temp (type, 0, 0, 1);
9053 do_pending_stack_adjust ();
9055 op0 = gen_label_rtx ();
9056 op1 = gen_label_rtx ();
9057 jumpifnot (TREE_OPERAND (exp, 0), op0);
9058 store_expr (TREE_OPERAND (exp, 1), temp,
		  modifier == EXPAND_STACK_PARM,
		  false);
      emit_jump_insn (gen_jump (op1));
      emit_barrier ();
      emit_label (op0);
9065 store_expr (TREE_OPERAND (exp, 2), temp,
		  modifier == EXPAND_STACK_PARM,
		  false);

      emit_label (op1);
      return temp;
    case VEC_COND_EXPR:
      target = expand_vec_cond_expr (exp, target);
      return target;

    case MODIFY_EXPR:
      {
9079 tree lhs = TREE_OPERAND (exp, 0);
9080 tree rhs = TREE_OPERAND (exp, 1);
9081 gcc_assert (ignore);
9083 /* Check for |= or &= of a bitfield of size one into another bitfield
9084 of size 1. In this case, (unless we need the result of the
9085 assignment) we can do this more efficiently with a
9086 test followed by an assignment, if necessary.
9088 ??? At this point, we can't get a BIT_FIELD_REF here. But if
	   things change so we do, this code should be enhanced to
	   support it.  */
9091 if (TREE_CODE (lhs) == COMPONENT_REF
9092 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9093 || TREE_CODE (rhs) == BIT_AND_EXPR)
9094 && TREE_OPERAND (rhs, 0) == lhs
9095 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9096 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9097 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();
9100 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
	    do_jump (TREE_OPERAND (rhs, 1),
		     value ? label : 0,
		     value ? 0 : label);
9104 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9105 MOVE_NONTEMPORAL (exp));
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }
	expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
	return const0_rtx;
      }
    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
9117 expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case ADDR_EXPR:
      return expand_expr_addr_expr (exp, target, tmode, modifier);
    case COMPLEX_EXPR:
      /* Get the rtx code of the operands.  */
9127 op0 = expand_normal (TREE_OPERAND (exp, 0));
9128 op1 = expand_normal (TREE_OPERAND (exp, 1));
      if (!target)
	target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9133 /* Move the real (op0) and imaginary (op1) parts to their location. */
9134 write_complex_part (target, op0, false);
      write_complex_part (target, op1, true);

      return target;
    case REALPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
9141 return read_complex_part (op0, false);
    case IMAGPART_EXPR:
      op0 = expand_normal (TREE_OPERAND (exp, 0));
9145 return read_complex_part (op0, true);
    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;
9151 case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
9154 case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();
9158 case WITH_CLEANUP_EXPR:
9159 case CLEANUP_POINT_EXPR:
9161 case CASE_LABEL_EXPR:
9167 case PREINCREMENT_EXPR:
9168 case PREDECREMENT_EXPR:
9169 case POSTINCREMENT_EXPR:
9170 case POSTDECREMENT_EXPR:
9173 case TRUTH_ANDIF_EXPR:
9174 case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();
9178 case CHANGE_DYNAMIC_TYPE_EXPR:
9179 /* This is ignored at the RTL level. The tree level set
9180 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
	 overkill for the RTL layer but is all that we can
	 do.  */
      return const0_rtx;
    case EXC_PTR_EXPR:
      return get_exception_pointer ();
    case FILTER_EXPR:
      return get_exception_filter ();
    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      gcc_unreachable ();
    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
9208 case WITH_SIZE_EXPR:
9209 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9210 have pulled out the size to use in whatever context it needed. */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);
9214 case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
9217 tree oprnd1 = TREE_OPERAND (exp, 1);
9218 tree oprnd2 = TREE_OPERAND (exp, 2);
9221 this_optab = optab_for_tree_code (code, type, optab_default);
9222 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9223 op2 = expand_normal (oprnd2);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
    case DOT_PROD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
9233 tree oprnd1 = TREE_OPERAND (exp, 1);
9234 tree oprnd2 = TREE_OPERAND (exp, 2);
9237 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9238 op2 = expand_normal (oprnd2);
	target = expand_widen_pattern_expr (exp, op0, op1, op2,
					    target, unsignedp);
	return target;
      }
9244 case WIDEN_SUM_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
9247 tree oprnd1 = TREE_OPERAND (exp, 1);
9249 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
					    target, unsignedp);
	return target;
      }
9255 case REDUC_MAX_EXPR:
9256 case REDUC_MIN_EXPR:
9257 case REDUC_PLUS_EXPR:
9259 op0 = expand_normal (TREE_OPERAND (exp, 0));
9260 this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_unop (mode, this_optab, op0, target, unsignedp);
      gcc_assert (temp);
      return temp;
9266 case VEC_EXTRACT_EVEN_EXPR:
9267 case VEC_EXTRACT_ODD_EXPR:
9269 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9270 NULL_RTX, &op0, &op1, 0);
9271 this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      gcc_assert (temp);
      return temp;
9278 case VEC_INTERLEAVE_HIGH_EXPR:
9279 case VEC_INTERLEAVE_LOW_EXPR:
9281 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9282 NULL_RTX, &op0, &op1, 0);
9283 this_optab = optab_for_tree_code (code, type, optab_default);
      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      gcc_assert (temp);
      return temp;
9290 case VEC_LSHIFT_EXPR:
9291 case VEC_RSHIFT_EXPR:
      target = expand_vec_shift_expr (exp, target);
      return target;
9297 case VEC_UNPACK_HI_EXPR:
9298 case VEC_UNPACK_LO_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
9301 this_optab = optab_for_tree_code (code, type, optab_default);
	temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
					  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }
9308 case VEC_UNPACK_FLOAT_HI_EXPR:
9309 case VEC_UNPACK_FLOAT_LO_EXPR:
      {
	op0 = expand_normal (TREE_OPERAND (exp, 0));
9312 /* The signedness is determined from input operand. */
9313 this_optab = optab_for_tree_code (code,
9314 TREE_TYPE (TREE_OPERAND (exp, 0)),
9316 temp = expand_widen_pattern_expr
9317 (exp, op0, NULL_RTX, NULL_RTX,
	   target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
	gcc_assert (temp);
	return temp;
      }
9324 case VEC_WIDEN_MULT_HI_EXPR:
9325 case VEC_WIDEN_MULT_LO_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
9328 tree oprnd1 = TREE_OPERAND (exp, 1);
9330 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
					    target, unsignedp);
	gcc_assert (target);
	return target;
      }
9337 case VEC_PACK_TRUNC_EXPR:
9338 case VEC_PACK_SAT_EXPR:
9339 case VEC_PACK_FIX_TRUNC_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      goto binop;
    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }
9348 /* Here to do an ordinary binary operator. */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
9353 this_optab = optab_for_tree_code (code, type, optab_default);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
9357 temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
9362 #undef REDUCE_BIT_FIELD
9364 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9365 signedness of TYPE), possibly returning the result in TARGET. */
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
9369 HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
9372 /* For constant values, reduce using build_int_cst_type. */
9373 if (GET_CODE (exp) == CONST_INT)
9375 HOST_WIDE_INT value = INTVAL (exp);
9376 tree t = build_int_cst_type (type, value);
9377 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
  else if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
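  /* For the signed case below: e.g. reducing a QImode value to a signed
     3-bit field shifts left by 5 and then arithmetic-shifts right by 5,
     replicating bit 2 (the field's sign bit) into the upper bits.  */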
  else
    {
      tree count = build_int_cst (NULL_TREE,
9395 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9396 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
9401 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9402 when applied to the address of EXP produces an address known to be
9403 aligned more than BIGGEST_ALIGNMENT. */
static int
is_aligning_offset (const_tree offset, const_tree exp)
{
9408 /* Strip off any conversions. */
9409 while (CONVERT_EXPR_P (offset))
9410 offset = TREE_OPERAND (offset, 0);
9412 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9413 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
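  /* I.e. the OFFSET accepted here looks like
     (- (intptr_t) &EXP) & (ALIGN - 1), the usual idiom for rounding the
     address of EXP up to an ALIGN-byte boundary.  */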
9414 if (TREE_CODE (offset) != BIT_AND_EXPR
9415 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9416 || compare_tree_int (TREE_OPERAND (offset, 1),
9417 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;
9421 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9422 It must be NEGATE_EXPR. Then strip any more conversions. */
9423 offset = TREE_OPERAND (offset, 0);
9424 while (CONVERT_EXPR_P (offset))
9425 offset = TREE_OPERAND (offset, 0);
  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;
9430 offset = TREE_OPERAND (offset, 0);
9431 while (CONVERT_EXPR_P (offset))
9432 offset = TREE_OPERAND (offset, 0);
9434 /* This must now be the address of EXP. */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
9438 /* Return the tree node if an ARG corresponds to a string constant or zero
9439 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9440 in bytes within the string that ARG is accessing. The type of the
9441 offset will be `sizetype'. */
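/* E.g. for ARG == &"hello"[2], or ARG == &str + 2 where str is a
   read-only VAR_DECL initialized to "hello", the STRING_CST is returned
   and *PTR_OFFSET is set to 2.  */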
tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset, lower_bound;

  STRIP_NOPS (arg);
9449 if (TREE_CODE (arg) == ADDR_EXPR)
9451 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9453 *ptr_offset = size_zero_node;
9454 return TREE_OPERAND (arg, 0);
9456 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9458 array = TREE_OPERAND (arg, 0);
9459 offset = size_zero_node;
9461 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9463 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9464 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9465 if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
9469 /* Check if the array has a nonzero lower bound. */
9470 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9471 if (!integer_zerop (lower_bound))
9473 /* If the offset and base aren't both constants, return 0. */
	      if (TREE_CODE (lower_bound) != INTEGER_CST)
		return 0;
	      if (TREE_CODE (offset) != INTEGER_CST)
		return 0;
9478 /* Adjust offset by the lower bound. */
9479 offset = size_diffop (fold_convert (sizetype, offset),
9480 fold_convert (sizetype, lower_bound));
9486 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);
9494 if (TREE_CODE (arg0) == ADDR_EXPR
9495 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
9501 else if (TREE_CODE (arg1) == ADDR_EXPR
9502 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;
  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = fold_convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;
9523 /* Variables initialized to string literals can be handled too. */
9524 if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;
9528 /* If they are read-only, non-volatile and bind locally. */
9529 if (! TREE_READONLY (array)
9530 || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;
9534 /* Avoid const char foo[4] = "abcde"; */
9535 if (DECL_SIZE_UNIT (array) == NULL_TREE
9536 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9537 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;
9541 /* If variable is bigger than the string literal, OFFSET must be constant
9542 and inside of the bounds of the string literal. */
9543 offset = fold_convert (sizetype, offset);
9544 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9545 && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;
9549 *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
9556 /* Generate code to calculate EXP using a store-flag instruction
9557 and return an rtx for the result. EXP is either a comparison
9558 or a TRUTH_NOT_EXPR whose operand is a comparison.
9560 If TARGET is nonzero, store the result there if convenient.
   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.
9565 Return zero if there is no suitable set-flag instruction
9566 available on this machine.
9568 Once expand_expr has been called on the arguments of the comparison,
9569 we are committed to doing the store flag, since it is not safe to
9570 re-evaluate the expression. We emit the store-flag insn by calling
9571 emit_store_flag, but only expand the arguments if we have a reason
9572 to believe that emit_store_flag will be successful. If we think that
9573 it will, but it isn't, we have to simulate the store-flag with a
9574 set/jump/set sequence. */
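/* E.g. for "r = (a < b);" with no usable store-flag pattern, the
   fallback emitted at the bottom of this function is roughly

	r = 1;  if (a < b) goto L;  r = 0;  L:

   (with the two constants swapped when INVERT is set).  */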
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
9590 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9591 result at the end. We can't simply invert the test since it would
9592 have already been inverted if it were valid. This case occurs for
9593 some floating-point comparisons. */
  invert = 0;
  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9596 invert = 1, exp = TREE_OPERAND (exp, 0);
9598 arg0 = TREE_OPERAND (exp, 0);
9599 arg1 = TREE_OPERAND (exp, 1);
9601 /* Don't crash if the comparison was erroneous. */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;
9605 type = TREE_TYPE (arg0);
9606 operand_mode = TYPE_MODE (type);
9607 unsignedp = TYPE_UNSIGNED (type);
9609 /* We won't bother with BLKmode store-flag operations because it would mean
9610 passing a lot of information to emit_store_flag. */
  if (operand_mode == BLKmode)
    return 0;
9614 /* We won't bother with store-flag operations involving function pointers
9615 when function pointers must be canonicalized before comparisons. */
9616 #ifdef HAVE_canonicalize_funcptr_for_compare
9617 if (HAVE_canonicalize_funcptr_for_compare
9618 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9619 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9621 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9622 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	      == FUNCTION_TYPE))))
    return 0;
#endif
9630 /* Get the rtx comparison code to use. We know that EXP is a comparison
9631 operation of some type. Some comparisons against 1 and -1 can be
9632 converted to comparisons with zero. Do so here so that the tests
9633 below will be aware that we have a comparison with zero. These
9634 tests will not catch constants in the first operand, but constants
9635 are rarely passed as the first operand. */
9637 switch (TREE_CODE (exp))
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
9699 /* Put a constant second. */
9700 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9701 || TREE_CODE (arg0) == FIXED_CST)
9703 tem = arg0; arg0 = arg1; arg1 = tem;
9704 code = swap_condition (code);
9707 /* If this is an equality or inequality test of a single bit, we can
9708 do this by shifting the bit being tested to the low-order bit and
9709 masking the result with the constant 1. If the condition was EQ,
9710 we xor it with 1. This does not require an scc insn and is faster
9711 than an scc insn even if we have it.
9713 The code to make this transformation was moved into fold_single_bit_test,
9714 so we just call into the folder and expand its result. */
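  /* E.g. "(x & 8) != 0" is rewritten as roughly "(x >> 3) & 1", with a
     trailing "^ 1" appended for the EQ form.  */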
9716 if ((code == NE || code == EQ)
9717 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9718 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9720 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9721 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9723 target, VOIDmode, EXPAND_NORMAL);
9726 /* Now see if we are likely to be able to do this. Return if not. */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;
9730 icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing)
    {
9734 enum machine_mode wmode;
9736 for (wmode = operand_mode;
9737 icode == CODE_FOR_nothing && wmode != VOIDmode;
9738 wmode = GET_MODE_WIDER_MODE (wmode))
	icode = optab_handler (cstore_optab, wmode)->insn_code;
    }
9742 if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
9745 /* We can only do this if it is one of the special cases that
9746 can be handled without an scc insn. */
9747 if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
9750 else if (! only_cheap && (code == NE || code == EQ)
9751 && TREE_CODE (type) != REAL_TYPE
9752 && ((optab_handler (abs_optab, operand_mode)->insn_code
9753 != CODE_FOR_nothing)
9754 || (optab_handler (ffs_optab, operand_mode)->insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
9761 if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;
9765 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
  if (target == 0)
    target = gen_reg_rtx (mode);
9770 result = emit_store_flag (target, code, op0, op1,
9771 operand_mode, unsignedp, 1);
  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
9781 /* If this failed, we have to do this with set/compare/jump/set code. */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9784 target = gen_reg_rtx (GET_MODE (target));
9786 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9787 label = gen_label_rtx ();
  do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
			   NULL_RTX, label);
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
9800 # define HAVE_casesi 0
9801 # define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
9805 /* If the machine does not have a case insn that compares the bounds,
9806 this means extra overhead for dispatch tables, which raises the
9807 threshold for using them. */
9808 #ifndef CASE_VALUES_THRESHOLD
9809 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9810 #endif /* CASE_VALUES_THRESHOLD */
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
9818 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9819 0 otherwise (i.e. if there is no casesi instruction). */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9822 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
	    rtx fallback_label ATTRIBUTE_UNUSED)
{
9825 enum machine_mode index_mode = SImode;
9826 int index_bits = GET_MODE_BITSIZE (index_mode);
9827 rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
9833 /* Convert the index to SImode. */
9834 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
9837 rtx rangertx = expand_normal (range);
9839 /* We must handle the endpoints in the original mode. */
9840 index_expr = build2 (MINUS_EXPR, index_type,
9841 index_expr, minval);
9842 minval = integer_zero_node;
9843 index = expand_normal (index_expr);
9845 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9846 omode, 1, default_label);
9847 /* Now we can safely truncate. */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
9854 index_type = lang_hooks.types.type_for_size (index_bits, 0);
	  index_expr = fold_convert (index_type, index_expr);
	}
      index = expand_normal (index_expr);
    }
9861 do_pending_stack_adjust ();
9863 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9864 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
	 (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);
9868 op1 = expand_normal (minval);
9870 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9871 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9872 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9873 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
	 (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);
9877 op2 = expand_normal (range);
9879 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9880 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9881 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9882 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
	 (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);
9886 emit_jump_insn (gen_casesi (index, op1, op2,
9887 table_label, !default_label
			      ? fallback_label : default_label));
  return 1;
}
9892 /* Attempt to generate a tablejump instruction; same concept. */
9893 #ifndef HAVE_tablejump
9894 #define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
9898 /* Subroutine of the next function.
9900 INDEX is the value being switched on, with the lowest value
9901 in the table already subtracted.
9902 MODE is its expected mode (needed if INDEX is constant).
9903 RANGE is the length of the jump table.
9904 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9906 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9907 index value is out of range. */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;
9915 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
9916 cfun->cfg->max_jumptable_ents = INTVAL (range);
9918 /* Do an unsigned comparison (in the proper mode) between the index
9919 expression and the value which represents the length of the range.
9920 Since we just finished subtracting the lower bound of the range
9921 from the index expression, this comparison allows us to simultaneously
9922 check that the original index expression value is both greater than
9923 or equal to the minimum value of the range and less than or equal to
9924 the maximum value of the range. */
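  /* I.e. after INDEX -= MINVAL, the single unsigned test
     (unsigned) INDEX > (unsigned) (MAXVAL - MINVAL) catches both
     INDEX < MINVAL (which wrapped around to a huge unsigned value)
     and INDEX > MAXVAL.  */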
  if (default_label)
    emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			     default_label);
9930 /* If index is in range, it must fit in Pmode.
9931 Convert to Pmode so we can index with it. */
9933 index = convert_to_mode (Pmode, index, 1);
9935 /* Don't let a MEM slip through, because then INDEX that comes
9936 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9937 and break_out_memory_refs will go to work on it and mess it up. */
9938 #ifdef PIC_CASE_VECTOR_ADDRESS
9939 if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif
9943 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9944 GET_MODE_SIZE, because this indicates how large insns are. The other
9945 uses should all be Pmode, because they are addresses. This code
9946 could fail if addresses and insns are not the same size. */
9947 index = gen_rtx_PLUS (Pmode,
9948 gen_rtx_MULT (Pmode, index,
9949 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9950 gen_rtx_LABEL_REF (Pmode, table_label));
9951 #ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
9956 index = memory_address (CASE_VECTOR_MODE, index);
9957 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9958 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9959 convert_move (temp, vector, 0);
9961 emit_jump_insn (gen_tablejump (temp, table_label));
9963 /* If we are generating PIC code or if the table is PC-relative, the
9964 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;
  if (! HAVE_tablejump)
    return 0;
9978 index_expr = fold_build2 (MINUS_EXPR, index_type,
9979 fold_convert (index_type, index_expr),
9980 fold_convert (index_type, minval));
9981 index = expand_normal (index_expr);
9982 do_pending_stack_adjust ();
9984 do_tablejump (index, TYPE_MODE (index_type),
9985 convert_modes (TYPE_MODE (index_type),
9986 TYPE_MODE (TREE_TYPE (range)),
9987 expand_normal (range),
9988 TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
9993 /* Nonzero if the mode is a valid vector mode for this architecture.
9994 This returns nonzero even if there is no hardware support for the
9995 vector mode, but we can emulate with narrower modes. */
int
vector_mode_valid_p (enum machine_mode mode)
{
10000 enum mode_class mclass = GET_MODE_CLASS (mode);
10001 enum machine_mode innermode;
10003 /* Doh! What's going on? */
10004 if (mclass != MODE_VECTOR_INT
10005 && mclass != MODE_VECTOR_FLOAT
10006 && mclass != MODE_VECTOR_FRACT
10007 && mclass != MODE_VECTOR_UFRACT
10008 && mclass != MODE_VECTOR_ACCUM
      && mclass != MODE_VECTOR_UACCUM)
    return 0;
10012 /* Hardware support. Woo hoo! */
  if (targetm.vector_mode_supported_p (mode))
    return 1;
10016 innermode = GET_MODE_INNER (mode);
10018 /* We should probably return 1 if requesting V4DI and we have no DI,
10019 but we have V2DI, but this is probably very unlikely. */
  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate it with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
10026 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
10033 enum machine_mode inner, mode;
10035 mode = TYPE_MODE (TREE_TYPE (exp));
10037 if (initializer_zerop (exp))
10038 return CONST0_RTX (mode);
10040 units = GET_MODE_NUNITS (mode);
10041 inner = GET_MODE_INNER (mode);
10043 v = rtvec_alloc (units);
10045 link = TREE_VECTOR_CST_ELTS (exp);
10046 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10048 elt = TREE_VALUE (link);
10050 if (TREE_CODE (elt) == REAL_CST)
      RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
						       inner);
10053 else if (TREE_CODE (elt) == FIXED_CST)
      RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
						       inner);
    else
10057 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					     TREE_INT_CST_HIGH (elt),
					     inner);
10062 /* Initialize remaining elements to 0. */
10063 for (; i < units; ++i)
10064 RTVEC_ELT (v, i) = CONST0_RTX (inner);
  return gen_rtx_CONST_VECTOR (mode, v);
}
10068 #include "gt-expr.h"