/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "ssaexpand.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to, to_addr, from, from_addr;
  int autinc_to, explicit_inc_to, autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
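
/* Worked example (editor's sketch, not in the original source): on a
   64-bit target with MOVE_RATIO == 4 and word-aligned operands,

     MOVE_BY_PIECES_P (16, 64)

   asks move_by_pieces_ninsns how many moves a 16-byte copy needs; two
   DImode moves beat the ratio, so the copy is expanded inline, whereas
   a 64-byte copy (eight moves) would be left to a movmem pattern or a
   libcall.  */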
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
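
/* Editor's illustration (not in the original source): a target whose
   unaligned loads are cheap can override this in its target header,
   e.g.

     #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) 0

   which lets move_by_pieces and friends use wide modes even for
   poorly-aligned operands.  */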
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_REGNO (reg, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
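
/* Editor's note (not in the original source): the tables built above
   are consulted elsewhere in this file with tests such as

     if (MEM_P (x) && ! MEM_VOLATILE_P (x) && direct_load[(int) mode])

   so a mode with no recognizable load pattern is never used to access
   a field of that mode straight from memory.  */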
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
   /* Make sure both are fixed-point modes or both are not.  */
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
               ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_clobber (to);
            }
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag (gen_reg_rtx (word_mode),
                                      LT, lowfrom, const0_rtx,
                                      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
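
/* Usage sketch (editor's addition, not in the original source):
   widening a QImode pseudo SRC into an SImode pseudo with
   zero-extension would look like

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);

   where UNSIGNEDP == 1 selects ZERO_EXTEND as the equivalent code;
   convert_move then uses a direct extend insn, an intermediate mode,
   or the shift-pair fallback above, depending on the target.  */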
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.   This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
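
/* Usage sketch (editor's addition, not in the original source):

     rtx narrowed = convert_modes (QImode, SImode, x, 1);

   may return a gen_lowpart of X without emitting any insns, or a
   fresh pseudo initialized via convert_move; callers must therefore
   treat the result as a value, not as an lvalue.  */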
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
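
/* Usage sketch (editor's addition, not in the original source): a
   16-byte, word-aligned structure copy could be expanded as

     move_by_pieces (to_mem, from_mem, 16, 64, 0);

   which on a 64-bit target emits two DImode load/store pairs, while
   ENDP == 1 would instead return the address one past the copied
   block, ala mempcpy.  */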
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
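
/* Worked example (editor's addition, not in the original source):
   move_by_pieces_ninsns (13, 64, MOVE_MAX_PIECES + 1) on a 64-bit
   target counts one DImode, one SImode and one QImode move, i.e. 3
   insns, by repeatedly taking the widest usable mode and keeping the
   remainder.  */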
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
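
/* Usage sketch (editor's addition, not in the original source): most
   callers go through the plain wrapper,

     emit_block_move (x, y, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   while callers with extra knowledge, such as the memcpy builtin
   expander, can pass alignment and size hints directly to
   emit_block_move_hints.  */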
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align,
                            HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          if (insn_data[(int) code].n_operands == 4)
            pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          else
            pat = GEN_FCN ((int) code) (x, y, op2, opalign,
                                        GEN_INT (expected_align
                                                 / BITS_PER_UNIT),
                                        GEN_INT (expected_size));
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
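
/* Editor's note (not in the original source): a port that provides
   the block-copy routine under a different assembler name can call,
   e.g.,

     init_block_move_fn ("__gnu_memcpy");

   ASMSPEC only changes the assembler name via set_user_assembler_name;
   the decl keeps memcpy's C prototype.  */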
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
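
/* Editor's sketch (not in the original source): for SIZE == N the
   loop emitted above is equivalent to

     for (iter = 0; iter < N; iter++)
       x[iter] = y[iter];

   one byte (QImode) per iteration, which is why the ??? comment above
   asks for wider hunks.  */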
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
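
/* Editor's illustration (not in the original source): a PARALLEL
   group describing a value split between two registers looks like

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])

   and gen_group_rtx returns the same shape with fresh pseudos in
   place of regs 100 and 101.  */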
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
          rtx first, second;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              enum machine_mode dest_mode = GET_MODE (dest);
              enum machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode),
                                            0);
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode),
                                            0);
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = adj_bytelen;
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
2110 /* Generate code to copy a BLKmode object of TYPE out of a
2111 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2112 is null, a stack temporary is created. TGTBLK is returned.
2114 The purpose of this routine is to handle functions that return
2115 BLKmode structures in registers. Some machines (the PA for example)
2116 want to return all small structures in registers regardless of the
2117 structure's alignment. */
2120 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2122 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2123 rtx src = NULL, dst = NULL;
2124 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2125 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2126 enum machine_mode copy_mode;
2130 tgtblk = assign_temp (build_qualified_type (type,
2132 | TYPE_QUAL_CONST)),
2134 preserve_temp_slots (tgtblk);
2137 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2138 into a new pseudo which is a full word. */
2140 if (GET_MODE (srcreg) != BLKmode
2141 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2142 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2144 /* If the structure doesn't take up a whole number of words, see whether
2145 SRCREG is padded on the left or on the right. If it's on the left,
2146 set PADDING_CORRECTION to the number of bits to skip.
2148 In most ABIs, the structure will be returned at the least significant end of
2149 the register, which translates to right padding on little-endian
2150 targets and left padding on big-endian targets. The opposite
2151 holds if the structure is returned at the most significant
2152 end of the register. */
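/* A hypothetical worked example (not from the sources): with 32-bit
   words on a big-endian target that returns the value at the least
   significant end, a 6-byte structure gives bytes % UNITS_PER_WORD == 2,
   so the computation below yields
   PADDING_CORRECTION = 32 - 2 * 8 = 16 bits of left padding to skip.  */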
2153 if (bytes % UNITS_PER_WORD != 0
2154 && (targetm.calls.return_in_msb (type)
2156 : BYTES_BIG_ENDIAN))
2158 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2160 /* Copy the structure BITSIZE bits at a time. If the target lives in
2161 memory, take care of not reading/writing past its end by selecting
2162 a copy mode suited to BITSIZE. This should always be possible given how it is computed.
2165 We could probably emit more efficient code for machines which do not use
2166 strict alignment, but it doesn't seem worth the effort at the current time. */
2169 copy_mode = word_mode;
2172 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2173 if (mem_mode != BLKmode)
2174 copy_mode = mem_mode;
2177 for (bitpos = 0, xbitpos = padding_correction;
2178 bitpos < bytes * BITS_PER_UNIT;
2179 bitpos += bitsize, xbitpos += bitsize)
2181 /* We need a new source operand each time xbitpos is on a
2182 word boundary and when xbitpos == padding_correction
2183 (the first time through). */
2184 if (xbitpos % BITS_PER_WORD == 0
2185 || xbitpos == padding_correction)
2186 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2189 /* We need a new destination operand each time bitpos is on a word boundary. */
2191 if (bitpos % BITS_PER_WORD == 0)
2192 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2194 /* Use xbitpos for the source extraction (right justified) and
2195 bitpos for the destination store (left justified). */
2196 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2197 extract_bit_field (src, bitsize,
2198 xbitpos % BITS_PER_WORD, 1,
2199 NULL_RTX, copy_mode, copy_mode));
2205 /* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
2209 use_reg (rtx *call_fusage, rtx reg)
2211 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2214 = gen_rtx_EXPR_LIST (VOIDmode,
2215 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2218 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2219 starting at REGNO. All of these registers must be hard registers. */
2222 use_regs (rtx *call_fusage, int regno, int nregs)
2226 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2228 for (i = 0; i < nregs; i++)
2229 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2232 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2233 PARALLEL REGS. This is for calls that pass values in multiple
2234 non-contiguous locations. The Irix 6 ABI has examples of this. */
2237 use_group_regs (rtx *call_fusage, rtx regs)
2241 for (i = 0; i < XVECLEN (regs, 0); i++)
2243 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2245 /* A NULL entry means the parameter goes both on the stack and in
2246 registers. This can also be a MEM for targets that pass values
2247 partially on the stack and partially in registers. */
2248 if (reg != 0 && REG_P (reg))
2249 use_reg (call_fusage, reg);
2254 /* Determine whether the LEN bytes generated by CONSTFUN can be
2255 stored to memory using several move instructions. CONSTFUNDATA is
2256 a pointer which will be passed as argument in every CONSTFUN call.
2257 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2258 a memset operation and false if it's a copy of a constant string.
2259 Return nonzero if a call to store_by_pieces should succeed. */
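/* For illustration only: callers such as the string builtins expand a
   copy by pairing this predicate with store_by_pieces, using a CONSTFUN
   that reads from a C string.  A minimal sketch, where READ_STR is a
   hypothetical callback in the style of the ones in builtins.c:

     static rtx
     read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

     ...
     if (can_store_by_pieces (len, read_str, (void *) str, align, false))
       dest = store_by_pieces (dest_mem, len, read_str, (void *) str,
			       align, false, 0);  */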
2262 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2263 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2264 void *constfundata, unsigned int align, bool memsetp)
2266 unsigned HOST_WIDE_INT l;
2267 unsigned int max_size;
2268 HOST_WIDE_INT offset = 0;
2269 enum machine_mode mode, tmode;
2270 enum insn_code icode;
2278 ? SET_BY_PIECES_P (len, align)
2279 : STORE_BY_PIECES_P (len, align)))
2282 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2283 if (align >= GET_MODE_ALIGNMENT (tmode))
2284 align = GET_MODE_ALIGNMENT (tmode);
2287 enum machine_mode xmode;
2289 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2291 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2292 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2293 || SLOW_UNALIGNED_ACCESS (tmode, align))
2296 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2299 /* We would first store what we can in the largest integer mode, then go to
2300 successively smaller modes. */
2303 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2308 max_size = STORE_MAX_PIECES + 1;
2309 while (max_size > 1)
2311 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2312 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2313 if (GET_MODE_SIZE (tmode) < max_size)
2316 if (mode == VOIDmode)
2319 icode = optab_handler (mov_optab, mode)->insn_code;
2320 if (icode != CODE_FOR_nothing
2321 && align >= GET_MODE_ALIGNMENT (mode))
2323 unsigned int size = GET_MODE_SIZE (mode);
2330 cst = (*constfun) (constfundata, offset, mode);
2331 if (!LEGITIMATE_CONSTANT_P (cst))
2341 max_size = GET_MODE_SIZE (mode);
2344 /* The code above should have handled everything. */
2351 /* Generate several move instructions to store LEN bytes generated by
2352 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2353 pointer which will be passed as argument in every CONSTFUN call.
2354 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2355 a memset operation and false if it's a copy of a constant string.
2356 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, ala
2357 mempcpy; and if ENDP is 2 return memory at the end minus one byte, ala stpcpy. */
2361 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2362 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2363 void *constfundata, unsigned int align, bool memsetp, int endp)
2365 struct store_by_pieces data;
2369 gcc_assert (endp != 2);
2374 ? SET_BY_PIECES_P (len, align)
2375 : STORE_BY_PIECES_P (len, align));
2376 data.constfun = constfun;
2377 data.constfundata = constfundata;
2380 store_by_pieces_1 (&data, align);
2385 gcc_assert (!data.reverse);
2390 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2391 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2393 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2396 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2403 to1 = adjust_address (data.to, QImode, data.offset);
2411 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2412 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2415 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2417 struct store_by_pieces data;
2422 data.constfun = clear_by_pieces_1;
2423 data.constfundata = NULL;
2426 store_by_pieces_1 (&data, align);
2429 /* Callback routine for clear_by_pieces.
2430 Return const0_rtx unconditionally. */
2433 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2434 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2435 enum machine_mode mode ATTRIBUTE_UNUSED)
2440 /* Subroutine of clear_by_pieces and store_by_pieces.
2441 Generate several move instructions to store LEN bytes of block TO. (A MEM
2442 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
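/* As a hypothetical worked example: on a 32-bit target with
   STORE_MAX_PIECES == 4, a store of LEN == 7 bytes is decomposed by the
   loop below into one SImode store (bytes 0-3), one HImode store
   (bytes 4-5) and one QImode store (byte 6), assuming the destination
   is sufficiently aligned and the target has the move patterns.  */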
2445 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2446 unsigned int align ATTRIBUTE_UNUSED)
2448 rtx to_addr = XEXP (data->to, 0);
2449 unsigned int max_size = STORE_MAX_PIECES + 1;
2450 enum machine_mode mode = VOIDmode, tmode;
2451 enum insn_code icode;
2454 data->to_addr = to_addr;
2456 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2457 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2459 data->explicit_inc_to = 0;
2461 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2463 data->offset = data->len;
2465 /* If storing requires more than two move insns,
2466 copy addresses to registers (to make displacements shorter)
2467 and use post-increment if available. */
2468 if (!data->autinc_to
2469 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2471 /* Determine the main mode we'll be using. */
2472 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2473 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2474 if (GET_MODE_SIZE (tmode) < max_size)
2477 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2479 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2480 data->autinc_to = 1;
2481 data->explicit_inc_to = -1;
2484 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2485 && ! data->autinc_to)
2487 data->to_addr = copy_addr_to_reg (to_addr);
2488 data->autinc_to = 1;
2489 data->explicit_inc_to = 1;
2492 if (!data->autinc_to && CONSTANT_P (to_addr))
2493 data->to_addr = copy_addr_to_reg (to_addr);
2496 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2497 if (align >= GET_MODE_ALIGNMENT (tmode))
2498 align = GET_MODE_ALIGNMENT (tmode);
2501 enum machine_mode xmode;
2503 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2505 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2506 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2507 || SLOW_UNALIGNED_ACCESS (tmode, align))
2510 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2513 /* First store what we can in the largest integer mode, then go to
2514 successively smaller modes. */
2516 while (max_size > 1)
2518 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2519 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2520 if (GET_MODE_SIZE (tmode) < max_size)
2523 if (mode == VOIDmode)
2526 icode = optab_handler (mov_optab, mode)->insn_code;
2527 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2528 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2530 max_size = GET_MODE_SIZE (mode);
2533 /* The code above should have handled everything. */
2534 gcc_assert (!data->len);
2537 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2538 with move instructions for mode MODE. GENFUN is the gen_... function
2539 to make a move insn for that mode. DATA has all the other info. */
2542 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2543 struct store_by_pieces *data)
2545 unsigned int size = GET_MODE_SIZE (mode);
2548 while (data->len >= size)
2551 data->offset -= size;
2553 if (data->autinc_to)
2554 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2557 to1 = adjust_address (data->to, mode, data->offset);
2559 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2560 emit_insn (gen_add2_insn (data->to_addr,
2561 GEN_INT (-(HOST_WIDE_INT) size)));
2563 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2564 emit_insn ((*genfun) (to1, cst));
2566 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2567 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2569 if (! data->reverse)
2570 data->offset += size;
2576 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2577 its length in bytes. */
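/* In outline, the strategy below is: emit a single move of zero when
   OBJECT has a non-BLK mode of exactly SIZE bytes, write zeros to both
   parts of a complex value, use clear_by_pieces for small constant
   sizes, try a target setmem pattern, and finally fall back to a
   memset libcall.  */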
2580 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2581 unsigned int expected_align, HOST_WIDE_INT expected_size)
2583 enum machine_mode mode = GET_MODE (object);
2586 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2588 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2589 just move a zero. Otherwise, do this a piece at a time. */
2591 && GET_CODE (size) == CONST_INT
2592 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2594 rtx zero = CONST0_RTX (mode);
2597 emit_move_insn (object, zero);
2601 if (COMPLEX_MODE_P (mode))
2603 zero = CONST0_RTX (GET_MODE_INNER (mode));
2606 write_complex_part (object, zero, 0);
2607 write_complex_part (object, zero, 1);
2613 if (size == const0_rtx)
2616 align = MEM_ALIGN (object);
2618 if (GET_CODE (size) == CONST_INT
2619 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2620 clear_by_pieces (object, INTVAL (size), align);
2621 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2622 expected_align, expected_size))
2625 return set_storage_via_libcall (object, size, const0_rtx,
2626 method == BLOCK_OP_TAILCALL);
2632 clear_storage (rtx object, rtx size, enum block_op_methods method)
2634 return clear_storage_hints (object, size, method, 0, -1);
2638 /* A subroutine of clear_storage. Expand a call to memset.
2639 Return the return value of memset, 0 otherwise. */
2642 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2644 tree call_expr, fn, object_tree, size_tree, val_tree;
2645 enum machine_mode size_mode;
2648 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2649 place those pseudos into a VAR_DECL and use them later. */
2651 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2653 size_mode = TYPE_MODE (sizetype);
2654 size = convert_to_mode (size_mode, size, 1);
2655 size = copy_to_mode_reg (size_mode, size);
2657 /* It is incorrect to use the libcall calling conventions to call
2658 memset in this context. This could be a user call to memset and
2659 the user may wish to examine the return value from memset. For
2660 targets where libcalls and normal calls have different conventions
2661 for returning pointers, we could end up generating incorrect code. */
2663 object_tree = make_tree (ptr_type_node, object);
2664 if (GET_CODE (val) != CONST_INT)
2665 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2666 size_tree = make_tree (sizetype, size);
2667 val_tree = make_tree (integer_type_node, val);
2669 fn = clear_storage_libcall_fn (true);
2670 call_expr = build_call_expr (fn, 3,
2671 object_tree, val_tree, size_tree);
2672 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2674 retval = expand_normal (call_expr);
2679 /* A subroutine of set_storage_via_libcall. Create the tree node
2680 for the function we use for block clears. The first time FOR_CALL
2681 is true, we call assemble_external. */
2683 tree block_clear_fn;
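/* The declaration built below corresponds to the C prototype
   void *memset (void *s, int c, size_t n);
   so the same libcall also serves nonzero fill values.  */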
2686 init_block_clear_fn (const char *asmspec)
2688 if (!block_clear_fn)
2692 fn = get_identifier ("memset");
2693 args = build_function_type_list (ptr_type_node, ptr_type_node,
2694 integer_type_node, sizetype,
2697 fn = build_decl (FUNCTION_DECL, fn, args);
2698 DECL_EXTERNAL (fn) = 1;
2699 TREE_PUBLIC (fn) = 1;
2700 DECL_ARTIFICIAL (fn) = 1;
2701 TREE_NOTHROW (fn) = 1;
2702 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2703 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2705 block_clear_fn = fn;
2709 set_user_assembler_name (block_clear_fn, asmspec);
2713 clear_storage_libcall_fn (int for_call)
2715 static bool emitted_extern;
2717 if (!block_clear_fn)
2718 init_block_clear_fn (NULL);
2720 if (for_call && !emitted_extern)
2722 emitted_extern = true;
2723 make_decl_rtl (block_clear_fn);
2724 assemble_external (block_clear_fn);
2727 return block_clear_fn;
2730 /* Expand a setmem pattern; return true if successful. */
2733 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2734 unsigned int expected_align, HOST_WIDE_INT expected_size)
2736 /* Try the most limited insn first, because there's no point
2737 including more than one in the machine description unless
2738 the more limited one has some advantage. */
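  /* A setmem expander receives (destination MEM, length, fill byte,
     alignment), plus the expected-alignment and expected-size hints
     when the pattern takes six operands; the loop below supplies the
     operands in that order.  */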
2740 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2741 enum machine_mode mode;
2743 if (expected_align < align)
2744 expected_align = align;
2746 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2747 mode = GET_MODE_WIDER_MODE (mode))
2749 enum insn_code code = setmem_optab[(int) mode];
2750 insn_operand_predicate_fn pred;
2752 if (code != CODE_FOR_nothing
2753 /* We don't need MODE to be narrower than
2754 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2755 the mode mask, as it is returned by the macro, it will
2756 definitely be less than the actual mode mask. */
2757 && ((GET_CODE (size) == CONST_INT
2758 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2759 <= (GET_MODE_MASK (mode) >> 1)))
2760 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2761 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2762 || (*pred) (object, BLKmode))
2763 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2764 || (*pred) (opalign, VOIDmode)))
2767 enum machine_mode char_mode;
2768 rtx last = get_last_insn ();
2771 opsize = convert_to_mode (mode, size, 1);
2772 pred = insn_data[(int) code].operand[1].predicate;
2773 if (pred != 0 && ! (*pred) (opsize, mode))
2774 opsize = copy_to_mode_reg (mode, opsize);
2777 char_mode = insn_data[(int) code].operand[2].mode;
2778 if (char_mode != VOIDmode)
2780 opchar = convert_to_mode (char_mode, opchar, 1);
2781 pred = insn_data[(int) code].operand[2].predicate;
2782 if (pred != 0 && ! (*pred) (opchar, char_mode))
2783 opchar = copy_to_mode_reg (char_mode, opchar);
2786 if (insn_data[(int) code].n_operands == 4)
2787 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2789 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2790 GEN_INT (expected_align
2792 GEN_INT (expected_size));
2799 delete_insns_since (last);
2807 /* Write to one of the components of the complex value CPLX. Write VAL to
2808 the real part if IMAG_P is false, and the imaginary part if it's true. */
2811 write_complex_part (rtx cplx, rtx val, bool imag_p)
2813 enum machine_mode cmode;
2814 enum machine_mode imode;
2817 if (GET_CODE (cplx) == CONCAT)
2819 emit_move_insn (XEXP (cplx, imag_p), val);
2823 cmode = GET_MODE (cplx);
2824 imode = GET_MODE_INNER (cmode);
2825 ibitsize = GET_MODE_BITSIZE (imode);
2827 /* For MEMs simplify_gen_subreg may generate an invalid new address
2828 because, e.g., the original address is considered mode-dependent
2829 by the target, which restricts simplify_subreg from invoking
2830 adjust_address_nv. Instead of preparing fallback support for an
2831 invalid address, we call adjust_address_nv directly. */
2834 emit_move_insn (adjust_address_nv (cplx, imode,
2835 imag_p ? GET_MODE_SIZE (imode) : 0),
2840 /* If the sub-object is at least word sized, then we know that subregging
2841 will work. This special case is important, since store_bit_field
2842 wants to operate on integer modes, and there's rarely an OImode to
2843 correspond to TCmode. */
2844 if (ibitsize >= BITS_PER_WORD
2845 /* For hard regs we have exact predicates. Assume we can split
2846 the original object if it spans an even number of hard regs.
2847 This special case is important for SCmode on 64-bit platforms
2848 where the natural size of floating-point regs is 32-bit. */
2850 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2851 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2853 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2854 imag_p ? GET_MODE_SIZE (imode) : 0);
2857 emit_move_insn (part, val);
2861 /* simplify_gen_subreg may fail for sub-word MEMs. */
2862 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2865 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2868 /* Extract one of the components of the complex value CPLX. Extract the
2869 real part if IMAG_P is false, and the imaginary part if it's true. */
2872 read_complex_part (rtx cplx, bool imag_p)
2874 enum machine_mode cmode, imode;
2877 if (GET_CODE (cplx) == CONCAT)
2878 return XEXP (cplx, imag_p);
2880 cmode = GET_MODE (cplx);
2881 imode = GET_MODE_INNER (cmode);
2882 ibitsize = GET_MODE_BITSIZE (imode);
2884 /* Special case reads from complex constants that got spilled to memory. */
2885 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2887 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2888 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2890 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2891 if (CONSTANT_CLASS_P (part))
2892 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2896 /* For MEMs simplify_gen_subreg may generate an invalid new address
2897 because, e.g., the original address is considered mode-dependent
2898 by the target, which restricts simplify_subreg from invoking
2899 adjust_address_nv. Instead of preparing fallback support for an
2900 invalid address, we call adjust_address_nv directly. */
2902 return adjust_address_nv (cplx, imode,
2903 imag_p ? GET_MODE_SIZE (imode) : 0);
2905 /* If the sub-object is at least word sized, then we know that subregging
2906 will work. This special case is important, since extract_bit_field
2907 wants to operate on integer modes, and there's rarely an OImode to
2908 correspond to TCmode. */
2909 if (ibitsize >= BITS_PER_WORD
2910 /* For hard regs we have exact predicates. Assume we can split
2911 the original object if it spans an even number of hard regs.
2912 This special case is important for SCmode on 64-bit platforms
2913 where the natural size of floating-point regs is 32-bit. */
2915 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2916 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2918 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2919 imag_p ? GET_MODE_SIZE (imode) : 0);
2923 /* simplify_gen_subreg may fail for sub-word MEMs. */
2924 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2927 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2928 true, NULL_RTX, imode, imode);
2931 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2932 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2933 represented in NEW_MODE. If FORCE is true, this will never happen, as
2934 we'll force-create a SUBREG if needed. */
2937 emit_move_change_mode (enum machine_mode new_mode,
2938 enum machine_mode old_mode, rtx x, bool force)
2942 if (push_operand (x, GET_MODE (x)))
2944 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2945 MEM_COPY_ATTRIBUTES (ret, x);
2949 /* We don't have to worry about changing the address since the
2950 size in bytes is supposed to be the same. */
2951 if (reload_in_progress)
2953 /* Copy the MEM to change the mode and move any
2954 substitutions from the old MEM to the new one. */
2955 ret = adjust_address_nv (x, new_mode, 0);
2956 copy_replacements (x, ret);
2959 ret = adjust_address (x, new_mode, 0);
2963 /* Note that we do want simplify_subreg's behavior of validating
2964 that the new mode is ok for a hard register. If we were to use
2965 simplify_gen_subreg, we would create the subreg, but would
2966 probably run into the target not being able to implement it. */
2967 /* Except, of course, when FORCE is true, where this is exactly what
2968 we want; that is needed for CCmodes on some targets. */
2970 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2972 ret = simplify_subreg (new_mode, x, old_mode, 0);
2978 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2979 an integer mode of the same size as MODE. Returns the instruction
2980 emitted, or NULL if such a move could not be generated. */
2983 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2985 enum machine_mode imode;
2986 enum insn_code code;
2988 /* There must exist a mode of the exact size we require. */
2989 imode = int_mode_for_mode (mode);
2990 if (imode == BLKmode)
2993 /* The target must support moves in this mode. */
2994 code = optab_handler (mov_optab, imode)->insn_code;
2995 if (code == CODE_FOR_nothing)
2998 x = emit_move_change_mode (imode, mode, x, force);
3001 y = emit_move_change_mode (imode, mode, y, force);
3004 return emit_insn (GEN_FCN (code) (x, y));
3007 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3008 Return an equivalent MEM that does not use an auto-increment. */
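/* As a hypothetical worked example: given a 4-byte push through
   (mem:SI (pre_dec:SI (reg sp))) on a stack-grows-down target, the code
   below emits an explicit sp = sp - 4 and returns (mem:SI (reg sp)),
   which names the same slot without the auto-increment side effect.  */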
3011 emit_move_resolve_push (enum machine_mode mode, rtx x)
3013 enum rtx_code code = GET_CODE (XEXP (x, 0));
3014 HOST_WIDE_INT adjust;
3017 adjust = GET_MODE_SIZE (mode);
3018 #ifdef PUSH_ROUNDING
3019 adjust = PUSH_ROUNDING (adjust);
3021 if (code == PRE_DEC || code == POST_DEC)
3023 else if (code == PRE_MODIFY || code == POST_MODIFY)
3025 rtx expr = XEXP (XEXP (x, 0), 1);
3028 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3029 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3030 val = INTVAL (XEXP (expr, 1));
3031 if (GET_CODE (expr) == MINUS)
3033 gcc_assert (adjust == val || adjust == -val);
3037 /* Do not use anti_adjust_stack, since we don't want to update
3038 stack_pointer_delta. */
3039 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3040 GEN_INT (adjust), stack_pointer_rtx,
3041 0, OPTAB_LIB_WIDEN);
3042 if (temp != stack_pointer_rtx)
3043 emit_move_insn (stack_pointer_rtx, temp);
3050 temp = stack_pointer_rtx;
3055 temp = plus_constant (stack_pointer_rtx, -adjust);
3061 return replace_equiv_address (x, temp);
3064 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3065 X is known to satisfy push_operand, and MODE is known to be complex.
3066 Returns the last instruction emitted. */
3069 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3071 enum machine_mode submode = GET_MODE_INNER (mode);
3074 #ifdef PUSH_ROUNDING
3075 unsigned int submodesize = GET_MODE_SIZE (submode);
3077 /* If we are pushing to the stack, but the size is smaller than what
3078 the machine can push exactly, we need to use move instructions. */
3079 if (PUSH_ROUNDING (submodesize) != submodesize)
3081 x = emit_move_resolve_push (mode, x);
3082 return emit_move_insn (x, y);
3086 /* Note that the real part always precedes the imag part in memory
3087 regardless of the machine's endianness. */
3088 switch (GET_CODE (XEXP (x, 0)))
3102 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3103 read_complex_part (y, imag_first));
3104 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3105 read_complex_part (y, !imag_first));
3108 /* A subroutine of emit_move_complex. Perform the move from Y to X
3109 via two moves of the parts. Returns the last instruction emitted. */
3112 emit_move_complex_parts (rtx x, rtx y)
3114 /* Show the output dies here. This is necessary for SUBREGs
3115 of pseudos since we cannot track their lifetimes correctly;
3116 hard regs shouldn't appear here except as return values. */
3117 if (!reload_completed && !reload_in_progress
3118 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3121 write_complex_part (x, read_complex_part (y, false), false);
3122 write_complex_part (x, read_complex_part (y, true), true);
3124 return get_last_insn ();
3127 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3128 MODE is known to be complex. Returns the last instruction emitted. */
3131 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3135 /* Need to take special care for pushes, to maintain proper ordering
3136 of the data, and possibly extra padding. */
3137 if (push_operand (x, mode))
3138 return emit_move_complex_push (mode, x, y);
3140 /* See if we can coerce the target into moving both values at once. */
3142 /* Move floating point as parts. */
3143 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3144 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3146 /* Not possible if the values are inherently not adjacent. */
3147 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3149 /* Is possible if both are registers (or subregs of registers). */
3150 else if (register_operand (x, mode) && register_operand (y, mode))
3152 /* If one of the operands is a memory, and alignment constraints
3153 are friendly enough, we may be able to do combined memory operations.
3154 We do not attempt this if Y is a constant because that combination is
3155 usually better with the by-parts thing below. */
3156 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3157 && (!STRICT_ALIGNMENT
3158 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3167 /* For memory to memory moves, optimal behavior can be had with the
3168 existing block move logic. */
3169 if (MEM_P (x) && MEM_P (y))
3171 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3172 BLOCK_OP_NO_LIBCALL);
3173 return get_last_insn ();
3176 ret = emit_move_via_integer (mode, x, y, true);
3181 return emit_move_complex_parts (x, y);
3184 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3185 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3188 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3192 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3195 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3196 if (code != CODE_FOR_nothing)
3198 x = emit_move_change_mode (CCmode, mode, x, true);
3199 y = emit_move_change_mode (CCmode, mode, y, true);
3200 return emit_insn (GEN_FCN (code) (x, y));
3204 /* Otherwise, find the MODE_INT mode of the same width. */
3205 ret = emit_move_via_integer (mode, x, y, false);
3206 gcc_assert (ret != NULL);
3210 /* Return true if word I of OP lies entirely in the
3211 undefined bits of a paradoxical subreg. */
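/* For example (hypothetical, little-endian 64-bit target): in
   (subreg:TI (reg:DI x) 0), word 1 of the TImode value lies entirely
   outside the DImode source, so this returns true and the caller can
   skip emitting a move for that word.  */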
3214 undefined_operand_subword_p (const_rtx op, int i)
3216 enum machine_mode innermode, innermostmode;
3218 if (GET_CODE (op) != SUBREG)
3220 innermode = GET_MODE (op);
3221 innermostmode = GET_MODE (SUBREG_REG (op));
3222 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3223 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3224 memory, except for a paradoxical subreg where we define
3225 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3227 if (SUBREG_BYTE (op) == 0
3228 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3230 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3231 if (WORDS_BIG_ENDIAN)
3232 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3233 if (BYTES_BIG_ENDIAN)
3234 offset += difference % UNITS_PER_WORD;
3236 if (offset >= GET_MODE_SIZE (innermostmode)
3237 || offset <= -GET_MODE_SIZE (word_mode))
3242 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3243 MODE is any multi-word or full-word mode that lacks a move_insn
3244 pattern. Note that you will get better code if you define such
3245 patterns, even if they must turn into multiple assembler instructions. */
3248 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3255 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3257 /* If X is a push on the stack, do the push now and replace
3258 X with a reference to the stack pointer. */
3259 if (push_operand (x, mode))
3260 x = emit_move_resolve_push (mode, x);
3262 /* If we are in reload, see if either operand is a MEM whose address
3263 is scheduled for replacement. */
3264 if (reload_in_progress && MEM_P (x)
3265 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3266 x = replace_equiv_address_nv (x, inner);
3267 if (reload_in_progress && MEM_P (y)
3268 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3269 y = replace_equiv_address_nv (y, inner);
3273 need_clobber = false;
3275 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3278 rtx xpart = operand_subword (x, i, 1, mode);
3281 /* Do not generate code for a move if it would come entirely
3282 from the undefined bits of a paradoxical subreg. */
3283 if (undefined_operand_subword_p (y, i))
3286 ypart = operand_subword (y, i, 1, mode);
3288 /* If we can't get a part of Y, put Y into memory if it is a
3289 constant. Otherwise, force it into a register. Then we must
3290 be able to get a part of Y. */
3291 if (ypart == 0 && CONSTANT_P (y))
3293 y = use_anchored_address (force_const_mem (mode, y));
3294 ypart = operand_subword (y, i, 1, mode);
3296 else if (ypart == 0)
3297 ypart = operand_subword_force (y, i, mode);
3299 gcc_assert (xpart && ypart);
3301 need_clobber |= (GET_CODE (xpart) == SUBREG);
3303 last_insn = emit_move_insn (xpart, ypart);
3309 /* Show the output dies here. This is necessary for SUBREGs
3310 of pseudos since we cannot track their lifetimes correctly;
3311 hard regs shouldn't appear here except as return values.
3312 We never want to emit such a clobber after reload. */
3314 && ! (reload_in_progress || reload_completed)
3315 && need_clobber != 0)
3323 /* Low level part of emit_move_insn.
3324 Called just like emit_move_insn, but assumes X and Y
3325 are basically valid. */
3328 emit_move_insn_1 (rtx x, rtx y)
3330 enum machine_mode mode = GET_MODE (x);
3331 enum insn_code code;
3333 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3335 code = optab_handler (mov_optab, mode)->insn_code;
3336 if (code != CODE_FOR_nothing)
3337 return emit_insn (GEN_FCN (code) (x, y));
3339 /* Expand complex moves by moving real part and imag part. */
3340 if (COMPLEX_MODE_P (mode))
3341 return emit_move_complex (mode, x, y);
3343 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3344 || ALL_FIXED_POINT_MODE_P (mode))
3346 rtx result = emit_move_via_integer (mode, x, y, true);
3348 /* If we can't find an integer mode, use a multi-word move. */
3352 return emit_move_multi_word (mode, x, y);
3355 if (GET_MODE_CLASS (mode) == MODE_CC)
3356 return emit_move_ccmode (mode, x, y);
3358 /* Try using a move pattern for the corresponding integer mode. This is
3359 only safe when simplify_subreg can convert MODE constants into integer
3360 constants. At present, it can only do this reliably if the value
3361 fits within a HOST_WIDE_INT. */
3362 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3364 rtx ret = emit_move_via_integer (mode, x, y, false);
3369 return emit_move_multi_word (mode, x, y);
3372 /* Generate code to copy Y into X.
3373 Both Y and X must have the same mode, except that
3374 Y can be a constant with VOIDmode.
3375 This mode cannot be BLKmode; use emit_block_move for that.
3377 Return the last instruction emitted. */
3380 emit_move_insn (rtx x, rtx y)
3382 enum machine_mode mode = GET_MODE (x);
3383 rtx y_cst = NULL_RTX;
3386 gcc_assert (mode != BLKmode
3387 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3392 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3393 && (last_insn = compress_float_constant (x, y)))
3398 if (!LEGITIMATE_CONSTANT_P (y))
3400 y = force_const_mem (mode, y);
3402 /* If the target's cannot_force_const_mem prevented the spill,
3403 assume that the target's move expanders will also take care
3404 of the non-legitimate constant. */
3408 y = use_anchored_address (y);
3412 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3415 && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
3416 && ! push_operand (x, GET_MODE (x))))
3417 x = validize_mem (x);
3420 && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3421 y = validize_mem (y);
3423 gcc_assert (mode != BLKmode);
3425 last_insn = emit_move_insn_1 (x, y);
3427 if (y_cst && REG_P (x)
3428 && (set = single_set (last_insn)) != NULL_RTX
3429 && SET_DEST (set) == x
3430 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3431 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3436 /* If Y is representable exactly in a narrower mode, and the target can
3437 perform the extension directly from constant or memory, then emit the
3438 move as an extension. */
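/* As a hypothetical example: a move of the DFmode constant 1.0, which
   is exactly representable in SFmode, can be emitted as a load of the
   SFmode image with (float_extend:DF ...) when the target supports the
   extension and the rtx_cost comparison below finds it no more
   expensive than the plain DFmode load.  */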
3441 compress_float_constant (rtx x, rtx y)
3443 enum machine_mode dstmode = GET_MODE (x);
3444 enum machine_mode orig_srcmode = GET_MODE (y);
3445 enum machine_mode srcmode;
3447 int oldcost, newcost;
3448 bool speed = optimize_insn_for_speed_p ();
3450 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3452 if (LEGITIMATE_CONSTANT_P (y))
3453 oldcost = rtx_cost (y, SET, speed);
3455 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3457 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3458 srcmode != orig_srcmode;
3459 srcmode = GET_MODE_WIDER_MODE (srcmode))
3462 rtx trunc_y, last_insn;
3464 /* Skip if the target can't extend this way. */
3465 ic = can_extend_p (dstmode, srcmode, 0);
3466 if (ic == CODE_FOR_nothing)
3469 /* Skip if the narrowed value isn't exact. */
3470 if (! exact_real_truncate (srcmode, &r))
3473 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3475 if (LEGITIMATE_CONSTANT_P (trunc_y))
3477 /* Skip if the target needs extra instructions to perform the extension. */
3479 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3481 /* This is valid, but may not be cheaper than the original. */
3482 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3483 if (oldcost < newcost)
3486 else if (float_extend_from_mem[dstmode][srcmode])
3488 trunc_y = force_const_mem (srcmode, trunc_y);
3489 /* This is valid, but may not be cheaper than the original. */
3490 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3491 if (oldcost < newcost)
3493 trunc_y = validize_mem (trunc_y);
3498 /* For CSE's benefit, force the compressed constant pool entry
3499 into a new pseudo. This constant may be used in different modes,
3500 and if not, combine will put things back together for us. */
3501 trunc_y = force_reg (srcmode, trunc_y);
3502 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3503 last_insn = get_last_insn ();
3506 set_unique_reg_note (last_insn, REG_EQUAL, y);
3514 /* Pushing data onto the stack. */
3516 /* Push a block of length SIZE (perhaps variable)
3517 and return an rtx to address the beginning of the block.
3518 The value may be virtual_outgoing_args_rtx.
3520 EXTRA is the number of bytes of padding to push in addition to SIZE.
3521 BELOW nonzero means this padding comes at low addresses;
3522 otherwise, the padding comes at high addresses. */
3525 push_block (rtx size, int extra, int below)
3529 size = convert_modes (Pmode, ptr_mode, size, 1);
3530 if (CONSTANT_P (size))
3531 anti_adjust_stack (plus_constant (size, extra));
3532 else if (REG_P (size) && extra == 0)
3533 anti_adjust_stack (size);
3536 temp = copy_to_mode_reg (Pmode, size);
3538 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3539 temp, 0, OPTAB_LIB_WIDEN);
3540 anti_adjust_stack (temp);
3543 #ifndef STACK_GROWS_DOWNWARD
3549 temp = virtual_outgoing_args_rtx;
3550 if (extra != 0 && below)
3551 temp = plus_constant (temp, extra);
3555 if (GET_CODE (size) == CONST_INT)
3556 temp = plus_constant (virtual_outgoing_args_rtx,
3557 -INTVAL (size) - (below ? 0 : extra));
3558 else if (extra != 0 && !below)
3559 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3560 negate_rtx (Pmode, plus_constant (size, extra)));
3562 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3563 negate_rtx (Pmode, size));
3566 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3569 #ifdef PUSH_ROUNDING
3571 /* Emit single push insn. */
3574 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3577 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3579 enum insn_code icode;
3580 insn_operand_predicate_fn pred;
3582 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3583 /* If there is a push pattern, use it. Otherwise try the old way of
3584 throwing a MEM representing the push operation to the move expander. */
3585 icode = optab_handler (push_optab, mode)->insn_code;
3586 if (icode != CODE_FOR_nothing)
3588 if (((pred = insn_data[(int) icode].operand[0].predicate)
3589 && !((*pred) (x, mode))))
3590 x = force_reg (mode, x);
3591 emit_insn (GEN_FCN (icode) (x));
3594 if (GET_MODE_SIZE (mode) == rounded_size)
3595 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3596 /* If we are to pad downward, adjust the stack pointer first and
3597 then store X into the stack location using an offset. This is
3598 because emit_move_insn does not know how to pad; it does not have access to TYPE. */
3600 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3602 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3603 HOST_WIDE_INT offset;
3605 emit_move_insn (stack_pointer_rtx,
3606 expand_binop (Pmode,
3607 #ifdef STACK_GROWS_DOWNWARD
3613 GEN_INT (rounded_size),
3614 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3616 offset = (HOST_WIDE_INT) padding_size;
3617 #ifdef STACK_GROWS_DOWNWARD
3618 if (STACK_PUSH_CODE == POST_DEC)
3619 /* We have already decremented the stack pointer, so get the previous value. */
3621 offset += (HOST_WIDE_INT) rounded_size;
3623 if (STACK_PUSH_CODE == POST_INC)
3624 /* We have already incremented the stack pointer, so get the previous value. */
3626 offset -= (HOST_WIDE_INT) rounded_size;
3628 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3632 #ifdef STACK_GROWS_DOWNWARD
3633 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3634 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3635 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3637 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3638 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3639 GEN_INT (rounded_size));
3641 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3644 dest = gen_rtx_MEM (mode, dest_addr);
3648 set_mem_attributes (dest, type, 1);
3650 if (flag_optimize_sibling_calls)
3651 /* Function incoming arguments may overlap with sibling call
3652 outgoing arguments and we cannot allow reordering of reads
3653 from function arguments with stores to outgoing arguments
3654 of sibling calls. */
3655 set_mem_alias_set (dest, 0);
3657 emit_move_insn (dest, x);
3661 /* Generate code to push X onto the stack, assuming it has mode MODE and
3663 MODE is redundant except when X is a CONST_INT (since they don't carry mode information).
3665 SIZE is an rtx for the size of data to be copied (in bytes),
3666 needed only if X is BLKmode.
3668 ALIGN (in bits) is maximum alignment we can assume.
3670 If PARTIAL and REG are both nonzero, then copy that many of the first
3671 bytes of X into registers starting with REG, and push the rest of X.
3672 The amount of space pushed is decreased by PARTIAL bytes.
3673 REG must be a hard register in this case.
3674 If REG is zero but PARTIAL is not, take all other actions for an
3675 argument partially in registers, but do not actually load any registers.
3678 EXTRA is the amount in bytes of extra space to leave next to this arg.
3679 This is ignored if an argument block has already been allocated.
3681 On a machine that lacks real push insns, ARGS_ADDR is the address of
3682 the bottom of the argument block for this call. We use indexing off there
3683 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3684 argument block has not been preallocated.
3686 ARGS_SO_FAR is the size of args previously pushed for this call.
3688 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3689 for arguments passed in registers. If nonzero, it will be the number
3690 of bytes required. */
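/* A hypothetical example of the PARTIAL case: with 4-byte words,
   PARTIAL == 8 and REG set means the first two words of X travel in
   registers starting at REG; the code below pushes only the remainder,
   and the register part's stack space is either deducted or merely
   skipped, depending on REG_PARM_STACK_SPACE.  */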
3693 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3694 unsigned int align, int partial, rtx reg, int extra,
3695 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3699 enum direction stack_direction
3700 #ifdef STACK_GROWS_DOWNWARD
3706 /* Decide where to pad the argument: `downward' for below,
3707 `upward' for above, or `none' for don't pad it.
3708 Default is below for small data on big-endian machines; else above. */
3709 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3711 /* Invert direction if stack is post-decrement.
3713 if (STACK_PUSH_CODE == POST_DEC)
3714 if (where_pad != none)
3715 where_pad = (where_pad == downward ? upward : downward);
3720 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3722 /* Copy a block into the stack, entirely or partially. */
3729 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3730 used = partial - offset;
3732 if (mode != BLKmode)
3734 /* A value is to be stored in an insufficiently aligned
3735 stack slot; copy via a suitably aligned slot if necessary. */
3737 size = GEN_INT (GET_MODE_SIZE (mode));
3738 if (!MEM_P (xinner))
3740 temp = assign_temp (type, 0, 1, 1);
3741 emit_move_insn (temp, xinner);
3748 /* USED is now the # of bytes we need not copy to the stack
3749 because registers will take care of them. */
3752 xinner = adjust_address (xinner, BLKmode, used);
3754 /* If the partial register-part of the arg counts in its stack size,
3755 skip the part of stack space corresponding to the registers.
3756 Otherwise, start copying to the beginning of the stack space,
3757 by setting SKIP to 0. */
3758 skip = (reg_parm_stack_space == 0) ? 0 : used;
3760 #ifdef PUSH_ROUNDING
3761 /* Do it with several push insns if that doesn't take lots of insns
3762 and if there is no difficulty with push insns that skip bytes
3763 on the stack for alignment purposes. */
3766 && GET_CODE (size) == CONST_INT
3768 && MEM_ALIGN (xinner) >= align
3769 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3770 /* Here we avoid the case of a structure whose weak alignment
3771 forces many pushes of a small amount of data,
3772 and such small pushes do rounding that causes trouble. */
3773 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3774 || align >= BIGGEST_ALIGNMENT
3775 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3776 == (align / BITS_PER_UNIT)))
3777 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3779 /* Push the padding now if the padding goes above and the stack grows down,
3780 or if the padding goes below and the stack grows up.
3781 But if space was already allocated, this has already been done. */
3782 if (extra && args_addr == 0
3783 && where_pad != none && where_pad != stack_direction)
3784 anti_adjust_stack (GEN_INT (extra));
3786 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3789 #endif /* PUSH_ROUNDING */
3793 /* Otherwise make space on the stack and copy the data
3794 to the address of that space. */
3796 /* Deduct words put into registers from the size we must copy. */
3799 if (GET_CODE (size) == CONST_INT)
3800 size = GEN_INT (INTVAL (size) - used);
3802 size = expand_binop (GET_MODE (size), sub_optab, size,
3803 GEN_INT (used), NULL_RTX, 0,
3807 /* Get the address of the stack space.
3808 In this case, we do not deal with EXTRA separately.
3809 A single stack adjust will do. */
3812 temp = push_block (size, extra, where_pad == downward);
3815 else if (GET_CODE (args_so_far) == CONST_INT)
3816 temp = memory_address (BLKmode,
3817 plus_constant (args_addr,
3818 skip + INTVAL (args_so_far)));
3820 temp = memory_address (BLKmode,
3821 plus_constant (gen_rtx_PLUS (Pmode,
3826 if (!ACCUMULATE_OUTGOING_ARGS)
3828 /* If the source is referenced relative to the stack pointer,
3829 copy it to another register to stabilize it. We do not need
3830 to do this if we know that we won't be changing sp. */
3832 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3833 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3834 temp = copy_to_reg (temp);
3837 target = gen_rtx_MEM (BLKmode, temp);
3839 /* We do *not* set_mem_attributes here, because incoming arguments
3840 may overlap with sibling call outgoing arguments and we cannot
3841 allow reordering of reads from function arguments with stores
3842 to outgoing arguments of sibling calls. We do, however, want
3843 to record the alignment of the stack slot. */
3844 /* ALIGN may well be better aligned than TYPE, e.g. due to
3845 PARM_BOUNDARY. Assume the caller isn't lying. */
3846 set_mem_align (target, align);
3848 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3851 else if (partial > 0)
3853 /* Scalar partly in registers. */
3855 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3858 /* # bytes of start of argument
3859 that we must make space for but need not store. */
3860 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3861 int args_offset = INTVAL (args_so_far);
3864 /* Push the padding now if the padding goes above and the stack grows down,
3865 or if the padding goes below and the stack grows up.
3866 But if space was already allocated, this has already been done. */
3867 if (extra && args_addr == 0
3868 && where_pad != none && where_pad != stack_direction)
3869 anti_adjust_stack (GEN_INT (extra));
3871 /* If we make space by pushing it, we might as well push
3872 the real data. Otherwise, we can leave OFFSET nonzero
3873 and leave the space uninitialized. */
3877 /* Now NOT_STACK gets the number of words that we don't need to
3878 allocate on the stack. Convert OFFSET to words too. */
3879 not_stack = (partial - offset) / UNITS_PER_WORD;
3880 offset /= UNITS_PER_WORD;
3882 /* If the partial register-part of the arg counts in its stack size,
3883 skip the part of stack space corresponding to the registers.
3884 Otherwise, start copying to the beginning of the stack space,
3885 by setting SKIP to 0. */
3886 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3888 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3889 x = validize_mem (force_const_mem (mode, x));
3891 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3892 SUBREGs of such registers are not allowed. */
3893 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3894 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3895 x = copy_to_reg (x);
3897 /* Loop over all the words allocated on the stack for this arg. */
3898 /* We can do it by words, because any scalar bigger than a word
3899 has a size that is a multiple of a word. */
3900 #ifndef PUSH_ARGS_REVERSED
3901 for (i = not_stack; i < size; i++)
3903 for (i = size - 1; i >= not_stack; i--)
3905 if (i >= not_stack + offset)
3906 emit_push_insn (operand_subword_force (x, i, mode),
3907 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3909 GEN_INT (args_offset + ((i - not_stack + skip)
3911 reg_parm_stack_space, alignment_pad);
3918 /* Push the padding now if the padding goes above and the stack grows down,
3919 or if the padding goes below and the stack grows up.
3920 But if space was already allocated, this has already been done. */
3921 if (extra && args_addr == 0
3922 && where_pad != none && where_pad != stack_direction)
3923 anti_adjust_stack (GEN_INT (extra));
3925 #ifdef PUSH_ROUNDING
3926 if (args_addr == 0 && PUSH_ARGS)
3927 emit_single_push_insn (mode, x, type);
3931 if (GET_CODE (args_so_far) == CONST_INT)
3933 = memory_address (mode,
3934 plus_constant (args_addr,
3935 INTVAL (args_so_far)));
3937 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3939 dest = gen_rtx_MEM (mode, addr);
3941 /* We do *not* set_mem_attributes here, because incoming arguments
3942 may overlap with sibling call outgoing arguments and we cannot
3943 allow reordering of reads from function arguments with stores
3944 to outgoing arguments of sibling calls. We do, however, want
3945 to record the alignment of the stack slot. */
3946 /* ALIGN may well be better aligned than TYPE, e.g. due to
3947 PARM_BOUNDARY. Assume the caller isn't lying. */
3948 set_mem_align (dest, align);
3950 emit_move_insn (dest, x);
3954 /* If part should go in registers, copy that part
3955 into the appropriate registers. Do this now, at the end,
3956 since mem-to-mem copies above may do function calls. */
3957 if (partial > 0 && reg != 0)
3959 /* Handle calls that pass values in multiple non-contiguous locations.
3960 The Irix 6 ABI has examples of this. */
3961 if (GET_CODE (reg) == PARALLEL)
3962 emit_group_load (reg, x, type, -1);
3965 gcc_assert (partial % UNITS_PER_WORD == 0);
3966 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3970 if (extra && args_addr == 0 && where_pad == stack_direction)
3971 anti_adjust_stack (GEN_INT (extra));
3973 if (alignment_pad && args_addr == 0)
3974 anti_adjust_stack (alignment_pad);
3977 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3981 get_subtarget (rtx x)
3985 /* Only registers can be subtargets. */
3987 /* Don't use hard regs to avoid extending their life. */
3988 || REGNO (x) < FIRST_PSEUDO_REGISTER
3992 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3993 FIELD is a bitfield. Returns true if the optimization was successful,
3994 and there's nothing else to do. */
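/* Two hypothetical cases this catches: for
   struct { unsigned a : 28, b : 4; } s;  the store s.b += 1 can become
   a shifted add on the containing word, since carries out of the
   topmost bitfield simply fall off the end; and for a 1-bit field,
   s.f ^= 1 can become an xor with a mask shifted into position.  */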
3997 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3998 unsigned HOST_WIDE_INT bitpos,
3999 enum machine_mode mode1, rtx str_rtx,
4002 enum machine_mode str_mode = GET_MODE (str_rtx);
4003 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4008 if (mode1 != VOIDmode
4009 || bitsize >= BITS_PER_WORD
4010 || str_bitsize > BITS_PER_WORD
4011 || TREE_SIDE_EFFECTS (to)
4012 || TREE_THIS_VOLATILE (to))
4016 if (!BINARY_CLASS_P (src)
4017 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4020 op0 = TREE_OPERAND (src, 0);
4021 op1 = TREE_OPERAND (src, 1);
4024 if (!operand_equal_p (to, op0, 0))
4027 if (MEM_P (str_rtx))
4029 unsigned HOST_WIDE_INT offset1;
4031 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4032 str_mode = word_mode;
4033 str_mode = get_best_mode (bitsize, bitpos,
4034 MEM_ALIGN (str_rtx), str_mode, 0);
4035 if (str_mode == VOIDmode)
4037 str_bitsize = GET_MODE_BITSIZE (str_mode);
4040 bitpos %= str_bitsize;
4041 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4042 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4044 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4047 /* If the bit field covers the whole REG/MEM, store_field
4048 will likely generate better code. */
4049 if (bitsize >= str_bitsize)
4052 /* We can't handle fields split across multiple entities. */
4053 if (bitpos + bitsize > str_bitsize)
4056 if (BYTES_BIG_ENDIAN)
4057 bitpos = str_bitsize - bitpos - bitsize;
4059 switch (TREE_CODE (src))
4063 /* For now, just optimize the case of the topmost bitfield
4064 where we don't need to do any masking and also
4065 1-bit bitfields where xor can be used.
4066 We might win by one instruction for the other bitfields
4067 too if insv/extv instructions aren't used, so that
4068 can be added later. */
4069 if (bitpos + bitsize != str_bitsize
4070 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4073 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4074 value = convert_modes (str_mode,
4075 TYPE_MODE (TREE_TYPE (op1)), value,
4076 TYPE_UNSIGNED (TREE_TYPE (op1)));
4078 /* We may be accessing data outside the field, which means
4079 we can alias adjacent data. */
4080 if (MEM_P (str_rtx))
4082 str_rtx = shallow_copy_rtx (str_rtx);
4083 set_mem_alias_set (str_rtx, 0);
4084 set_mem_expr (str_rtx, 0);
4087 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4088 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4090 value = expand_and (str_mode, value, const1_rtx, NULL);
4093 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4094 build_int_cst (NULL_TREE, bitpos),
4096 result = expand_binop (str_mode, binop, str_rtx,
4097 value, str_rtx, 1, OPTAB_WIDEN);
4098 if (result != str_rtx)
4099 emit_move_insn (str_rtx, result);
4104 if (TREE_CODE (op1) != INTEGER_CST)
4106 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4107 value = convert_modes (GET_MODE (str_rtx),
4108 TYPE_MODE (TREE_TYPE (op1)), value,
4109 TYPE_UNSIGNED (TREE_TYPE (op1)));
4111 /* We may be accessing data outside the field, which means
4112 we can alias adjacent data. */
4113 if (MEM_P (str_rtx))
4115 str_rtx = shallow_copy_rtx (str_rtx);
4116 set_mem_alias_set (str_rtx, 0);
4117 set_mem_expr (str_rtx, 0);
4120 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4121 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4123 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4125 value = expand_and (GET_MODE (str_rtx), value, mask,
4128 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4129 build_int_cst (NULL_TREE, bitpos),
4131 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4132 value, str_rtx, 1, OPTAB_WIDEN);
4133 if (result != str_rtx)
4134 emit_move_insn (str_rtx, result);
4135 return true;
4145 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4146 is true, try generating a nontemporal store. */
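/* Illustrative example (not from the original source): for

       struct P { int x; int y; } p;
       p.y = 3;

   the handled_component_p branch below decomposes p.y with
   get_inner_reference and stores through store_field, while a plain
   "p = q" falls through to the ordinary store_expr call at the end.  */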
4149 expand_assignment (tree to, tree from, bool nontemporal)
4154 /* Don't crash if the lhs of the assignment was erroneous. */
4155 if (TREE_CODE (to) == ERROR_MARK)
4157 result = expand_normal (from);
4158 return;
4161 /* Optimize away no-op moves without side-effects. */
4162 if (operand_equal_p (to, from, 0))
4163 return;
4165 /* Assignment of a structure component needs special treatment
4166 if the structure component's rtx is not simply a MEM.
4167 Assignment of an array element at a constant index, and assignment of
4168 an array element in an unaligned packed structure field, has the same
4170 if (handled_component_p (to)
4171 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4173 enum machine_mode mode1;
4174 HOST_WIDE_INT bitsize, bitpos;
4181 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4182 &unsignedp, &volatilep, true);
4184 /* If we are going to use store_bit_field and extract_bit_field,
4185 make sure to_rtx will be safe for multiple use. */
4187 to_rtx = expand_normal (tem);
4193 if (!MEM_P (to_rtx))
4195 /* We can get constant negative offsets into arrays with broken
4196 user code. Translate this to a trap instead of ICEing. */
4197 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4198 expand_builtin_trap ();
4199 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4202 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4203 #ifdef POINTERS_EXTEND_UNSIGNED
4204 if (GET_MODE (offset_rtx) != Pmode)
4205 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4207 if (GET_MODE (offset_rtx) != ptr_mode)
4208 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4211 /* A constant address in TO_RTX can have VOIDmode; we must not
4212 try to call force_reg for that case, so avoid it. */
4214 && GET_MODE (to_rtx) == BLKmode
4215 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4217 && (bitpos % bitsize) == 0
4218 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4219 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4221 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4225 to_rtx = offset_address (to_rtx, offset_rtx,
4226 highest_pow2_factor_for_target (to,
4230 /* Handle expand_expr of a complex value returning a CONCAT. */
4231 if (GET_CODE (to_rtx) == CONCAT)
4233 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4235 gcc_assert (bitpos == 0);
4236 result = store_expr (from, to_rtx, false, nontemporal);
4240 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4241 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4249 /* If the field is at offset zero, we could have been given the
4250 DECL_RTX of the parent struct. Don't munge it. */
4251 to_rtx = shallow_copy_rtx (to_rtx);
4253 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4255 /* Deal with volatile and readonly fields. The former is only
4256 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4258 MEM_VOLATILE_P (to_rtx) = 1;
4259 if (component_uses_parent_alias_set (to))
4260 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4263 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4264 to_rtx, to, from))
4265 result = NULL;
4266 else
4267 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4268 TREE_TYPE (tem), get_alias_set (to),
4273 preserve_temp_slots (result);
4279 /* If the rhs is a function call and its value is not an aggregate,
4280 call the function before we start to compute the lhs.
4281 This is needed for correct code for cases such as
4282 val = setjmp (buf) on machines where reference to val
4283 requires loading up part of an address in a separate insn.
4285 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4286 since it might be a promoted variable where the zero- or sign- extension
4287 needs to be done. Handling this in the normal way is safe because no
4288 computation is done before the call. The same is true for SSA names. */
4289 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4290 && COMPLETE_TYPE_P (TREE_TYPE (from))
4291 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4292 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4293 && REG_P (DECL_RTL (to)))
4294 || TREE_CODE (to) == SSA_NAME))
4299 value = expand_normal (from);
4301 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4303 /* Handle calls that return values in multiple non-contiguous locations.
4304 The Irix 6 ABI has examples of this. */
4305 if (GET_CODE (to_rtx) == PARALLEL)
4306 emit_group_load (to_rtx, value, TREE_TYPE (from),
4307 int_size_in_bytes (TREE_TYPE (from)));
4308 else if (GET_MODE (to_rtx) == BLKmode)
4309 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4310 else
4311 {
4312 if (POINTER_TYPE_P (TREE_TYPE (to)))
4313 value = convert_memory_address (GET_MODE (to_rtx), value);
4314 emit_move_insn (to_rtx, value);
4316 preserve_temp_slots (to_rtx);
4322 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4323 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4326 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4328 /* Don't move directly into a return register. */
4329 if (TREE_CODE (to) == RESULT_DECL
4330 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4335 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4337 if (GET_CODE (to_rtx) == PARALLEL)
4338 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4339 int_size_in_bytes (TREE_TYPE (from)));
4340 else
4341 emit_move_insn (to_rtx, temp);
4343 preserve_temp_slots (to_rtx);
4349 /* In case we are returning the contents of an object which overlaps
4350 the place the value is being stored, use a safe function when copying
4351 a value through a pointer into a structure value return block. */
4352 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4353 && cfun->returns_struct
4354 && !cfun->returns_pcc_struct)
4359 size = expr_size (from);
4360 from_rtx = expand_normal (from);
4362 emit_library_call (memmove_libfunc, LCT_NORMAL,
4363 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4364 XEXP (from_rtx, 0), Pmode,
4365 convert_to_mode (TYPE_MODE (sizetype),
4366 size, TYPE_UNSIGNED (sizetype)),
4367 TYPE_MODE (sizetype));
4369 preserve_temp_slots (to_rtx);
4375 /* Compute FROM and store the value in the rtx we got. */
4378 result = store_expr (from, to_rtx, 0, nontemporal);
4379 preserve_temp_slots (result);
4385 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4386 succeeded, false otherwise. */
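/* Usage sketch (illustrative, mirroring the fallback logic in
   store_expr below): callers try the nontemporal pattern first and
   fall back to a plain move when none exists:

       if (!emit_storent_insn (to_rtx, value))
	 emit_move_insn (to_rtx, value);  */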
4389 emit_storent_insn (rtx to, rtx from)
4391 enum machine_mode mode = GET_MODE (to), imode;
4392 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4393 rtx pattern;
4395 if (code == CODE_FOR_nothing)
4396 return false;
4398 imode = insn_data[code].operand[0].mode;
4399 if (!insn_data[code].operand[0].predicate (to, imode))
4400 return false;
4402 imode = insn_data[code].operand[1].mode;
4403 if (!insn_data[code].operand[1].predicate (from, imode))
4404 {
4405 from = copy_to_mode_reg (imode, from);
4406 if (!insn_data[code].operand[1].predicate (from, imode))
4407 return false;
4408 }
4410 pattern = GEN_FCN (code) (to, from);
4411 if (pattern == NULL_RTX)
4412 return false;
4414 emit_insn (pattern);
4415 return true;
4418 /* Generate code for computing expression EXP,
4419 and storing the value into TARGET.
4421 If the mode is BLKmode then we may return TARGET itself.
4422 It turns out that in BLKmode it doesn't cause a problem,
4423 because C has no operators that could combine two different
4424 assignments into the same BLKmode object with different values
4425 with no sequence point.  Will other languages need this to
4426 be more thorough?
4428 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4429 stack, and block moves may need to be treated specially.
4431 If NONTEMPORAL is true, try using a nontemporal store instruction. */
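/* Illustrative example (not from the original source) of the
   STRING_CST fast path below:

       char buf[64] = "ab";

   copies the constant bytes of the string with store_by_pieces and
   then uses clear_storage to zero the remaining tail of the array.  */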
4434 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4437 rtx alt_rtl = NULL_RTX;
4438 int dont_return_target = 0;
4440 if (VOID_TYPE_P (TREE_TYPE (exp)))
4442 /* C++ can generate ?: expressions with a throw expression in one
4443 branch and an rvalue in the other. Here, we resolve attempts to
4444 store the throw expression's nonexistent result. */
4445 gcc_assert (!call_param_p);
4446 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4447 return NULL_RTX;
4449 if (TREE_CODE (exp) == COMPOUND_EXPR)
4451 /* Perform first part of compound expression, then assign from second
4453 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4454 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4455 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4456 nontemporal);
4458 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4460 /* For conditional expression, get safe form of the target. Then
4461 test the condition, doing the appropriate assignment on either
4462 side. This avoids the creation of unnecessary temporaries.
4463 For non-BLKmode, it is more efficient not to do this. */
4465 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4467 do_pending_stack_adjust ();
4468 NO_DEFER_POP;
4469 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4470 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4471 nontemporal);
4472 emit_jump_insn (gen_jump (lab2));
4473 emit_label (lab1);
4475 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4476 nontemporal);
4477 emit_label (lab2);
4478 OK_DEFER_POP;
4480 return NULL_RTX;
4482 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4483 /* If this is a scalar in a register that is stored in a wider mode
4484 than the declared mode, compute the result into its declared mode
4485 and then convert to the wider mode. Our value is the computed
4488 rtx inner_target = 0;
4490 /* We can do the conversion inside EXP, which will often result
4491 in some optimizations. Do the conversion in two steps: first
4492 change the signedness, if needed, then the extend. But don't
4493 do this if the type of EXP is a subtype of something else
4494 since then the conversion might involve more than just
4495 converting modes. */
4496 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4497 && TREE_TYPE (TREE_TYPE (exp)) == 0
4498 && GET_MODE_PRECISION (GET_MODE (target))
4499 == TYPE_PRECISION (TREE_TYPE (exp)))
4501 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4502 != SUBREG_PROMOTED_UNSIGNED_P (target))
4504 /* Some types, e.g. Fortran's logical*4, won't have a signed
4505 version, so use the mode instead. */
4507 = (signed_or_unsigned_type_for
4508 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4510 ntype = lang_hooks.types.type_for_mode
4511 (TYPE_MODE (TREE_TYPE (exp)),
4512 SUBREG_PROMOTED_UNSIGNED_P (target));
4514 exp = fold_convert (ntype, exp);
4517 exp = fold_convert (lang_hooks.types.type_for_mode
4518 (GET_MODE (SUBREG_REG (target)),
4519 SUBREG_PROMOTED_UNSIGNED_P (target)),
4522 inner_target = SUBREG_REG (target);
4525 temp = expand_expr (exp, inner_target, VOIDmode,
4526 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4528 /* If TEMP is a VOIDmode constant, use convert_modes to make
4529 sure that we properly convert it. */
4530 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4532 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4533 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4534 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4535 GET_MODE (target), temp,
4536 SUBREG_PROMOTED_UNSIGNED_P (target));
4539 convert_move (SUBREG_REG (target), temp,
4540 SUBREG_PROMOTED_UNSIGNED_P (target));
4542 return NULL_RTX;
4544 else if (TREE_CODE (exp) == STRING_CST
4545 && !nontemporal && !call_param_p
4546 && TREE_STRING_LENGTH (exp) > 0
4547 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4549 /* Optimize initialization of an array with a STRING_CST. */
4550 HOST_WIDE_INT exp_len, str_copy_len;
4553 exp_len = int_expr_size (exp);
4554 if (exp_len <= 0)
4555 goto normal_expr;
4557 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4558 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4559 goto normal_expr;
4561 str_copy_len = TREE_STRING_LENGTH (exp);
4562 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4564 str_copy_len += STORE_MAX_PIECES - 1;
4565 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4567 str_copy_len = MIN (str_copy_len, exp_len);
4568 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4569 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4570 MEM_ALIGN (target), false))
4571 goto normal_expr;
4573 dest_mem = target;
4575 dest_mem = store_by_pieces (dest_mem,
4576 str_copy_len, builtin_strncpy_read_str,
4577 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4578 MEM_ALIGN (target), false,
4579 exp_len > str_copy_len ? 1 : 0);
4580 if (exp_len > str_copy_len)
4581 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4582 GEN_INT (exp_len - str_copy_len),
4583 BLOCK_OP_NORMAL);
4584 return NULL_RTX;
4590 normal_expr:
4591 /* If we want to use a nontemporal store, force the value to
4592 register first.  */
4593 tmp_target = nontemporal ? NULL_RTX : target;
4594 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4596 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4598 /* Return TARGET if it's a specified hardware register.
4599 If TARGET is a volatile mem ref, either return TARGET
4600 or return a reg copied *from* TARGET; ANSI requires this.
4602 Otherwise, if TEMP is not TARGET, return TEMP
4603 if it is constant (for efficiency),
4604 or if we really want the correct value. */
4605 if (!(target && REG_P (target)
4606 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4607 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4608 && ! rtx_equal_p (temp, target)
4609 && CONSTANT_P (temp))
4610 dont_return_target = 1;
4613 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4614 the same as that of TARGET, adjust the constant. This is needed, for
4615 example, in case it is a CONST_DOUBLE and we want only a word-sized
4617 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4618 && TREE_CODE (exp) != ERROR_MARK
4619 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4620 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4621 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4623 /* If value was not generated in the target, store it there.
4624 Convert the value to TARGET's type first if necessary and emit the
4625 pending incrementations that have been queued when expanding EXP.
4626 Note that we cannot emit the whole queue blindly because this will
4627 effectively disable the POST_INC optimization later.
4629 If TEMP and TARGET compare equal according to rtx_equal_p, but
4630 one or both of them are volatile memory refs, we have to distinguish
4632 - expand_expr has used TARGET. In this case, we must not generate
4633 another copy.  This can be detected by TARGET being equal according
4634 to == .
4635 - expand_expr has not used TARGET - that means that the source just
4636 happens to have the same RTX form. Since temp will have been created
4637 by expand_expr, it will compare unequal according to == .
4638 We must generate a copy in this case, to reach the correct number
4639 of volatile memory references. */
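/* Illustrative example: for a volatile TARGET, TEMP may be a distinct
   rtx referring to the same volatile memory; rtx_equal_p considers the
   two equal but pointer equality does not, so a copy is emitted to
   keep the number of volatile accesses correct.  */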
4641 if ((! rtx_equal_p (temp, target)
4642 || (temp != target && (side_effects_p (temp)
4643 || side_effects_p (target))))
4644 && TREE_CODE (exp) != ERROR_MARK
4645 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4646 but TARGET is not valid memory reference, TEMP will differ
4647 from TARGET although it is really the same location. */
4648 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4649 /* If there's nothing to copy, don't bother.  Don't call
4650 expr_size unless necessary, because in some front-ends (C++)
4651 the expr_size hook must not be given objects that are not
4652 supposed to be bit-copied or bit-initialized.  */
4653 && expr_size (exp) != const0_rtx)
4655 if (GET_MODE (temp) != GET_MODE (target)
4656 && GET_MODE (temp) != VOIDmode)
4658 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4659 if (dont_return_target)
4661 /* In this case, we will return TEMP,
4662 so make sure it has the proper mode.
4663 But don't forget to store the value into TARGET. */
4664 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4665 emit_move_insn (target, temp);
4667 else if (GET_MODE (target) == BLKmode
4668 || GET_MODE (temp) == BLKmode)
4669 emit_block_move (target, temp, expr_size (exp),
4671 ? BLOCK_OP_CALL_PARM
4672 : BLOCK_OP_NORMAL));
4674 convert_move (target, temp, unsignedp);
4677 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4679 /* Handle copying a string constant into an array. The string
4680 constant may be shorter than the array. So copy just the string's
4681 actual length, and clear the rest. First get the size of the data
4682 type of the string, which is actually the size of the target. */
4683 rtx size = expr_size (exp);
4685 if (GET_CODE (size) == CONST_INT
4686 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4687 emit_block_move (target, temp, size,
4689 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4692 /* Compute the size of the data to copy from the string. */
4694 = size_binop (MIN_EXPR,
4695 make_tree (sizetype, size),
4696 size_int (TREE_STRING_LENGTH (exp)));
4698 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4700 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4703 /* Copy that much. */
4704 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4705 TYPE_UNSIGNED (sizetype));
4706 emit_block_move (target, temp, copy_size_rtx,
4708 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4710 /* Figure out how much is left in TARGET that we have to clear.
4711 Do all calculations in ptr_mode. */
4712 if (GET_CODE (copy_size_rtx) == CONST_INT)
4714 size = plus_constant (size, -INTVAL (copy_size_rtx));
4715 target = adjust_address (target, BLKmode,
4716 INTVAL (copy_size_rtx));
4720 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4721 copy_size_rtx, NULL_RTX, 0,
4724 #ifdef POINTERS_EXTEND_UNSIGNED
4725 if (GET_MODE (copy_size_rtx) != Pmode)
4726 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4727 TYPE_UNSIGNED (sizetype));
4730 target = offset_address (target, copy_size_rtx,
4731 highest_pow2_factor (copy_size));
4732 label = gen_label_rtx ();
4733 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4734 GET_MODE (size), 0, label);
4737 if (size != const0_rtx)
4738 clear_storage (target, size, BLOCK_OP_NORMAL);
4744 /* Handle calls that return values in multiple non-contiguous locations.
4745 The Irix 6 ABI has examples of this. */
4746 else if (GET_CODE (target) == PARALLEL)
4747 emit_group_load (target, temp, TREE_TYPE (exp),
4748 int_size_in_bytes (TREE_TYPE (exp)));
4749 else if (GET_MODE (temp) == BLKmode)
4750 emit_block_move (target, temp, expr_size (exp),
4752 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4753 else if (nontemporal
4754 && emit_storent_insn (target, temp))
4755 /* If we managed to emit a nontemporal store, there is nothing else to
4756 do.  */
4757 ;
4758 else
4760 temp = force_operand (temp, target);
4761 if (temp != target)
4762 emit_move_insn (target, temp);
4769 /* Helper for categorize_ctor_elements. Identical interface. */
4772 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4773 HOST_WIDE_INT *p_elt_count,
4776 unsigned HOST_WIDE_INT idx;
4777 HOST_WIDE_INT nz_elts, elt_count;
4778 tree value, purpose;
4780 /* Whether CTOR is a valid constant initializer, in accordance with what
4781 initializer_constant_valid_p does. If inferred from the constructor
4782 elements, true until proven otherwise. */
4783 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4784 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4789 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4794 if (TREE_CODE (purpose) == RANGE_EXPR)
4796 tree lo_index = TREE_OPERAND (purpose, 0);
4797 tree hi_index = TREE_OPERAND (purpose, 1);
4799 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4800 mult = (tree_low_cst (hi_index, 1)
4801 - tree_low_cst (lo_index, 1) + 1);
4804 switch (TREE_CODE (value))
4808 HOST_WIDE_INT nz = 0, ic = 0;
4811 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4813 nz_elts += mult * nz;
4814 elt_count += mult * ic;
4816 if (const_from_elts_p && const_p)
4817 const_p = const_elt_p;
4824 if (!initializer_zerop (value))
4825 nz_elts += mult;
4826 elt_count += mult;
4827 break;
4829 case STRING_CST:
4830 nz_elts += mult * TREE_STRING_LENGTH (value);
4831 elt_count += mult * TREE_STRING_LENGTH (value);
4835 if (!initializer_zerop (TREE_REALPART (value)))
4836 nz_elts += mult;
4837 if (!initializer_zerop (TREE_IMAGPART (value)))
4838 nz_elts += mult;
4845 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4847 if (!initializer_zerop (TREE_VALUE (v)))
4848 nz_elts += mult;
4849 elt_count += mult;
4858 if (const_from_elts_p && const_p)
4859 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4860 != NULL_TREE;
4865 if (!*p_must_clear
4866 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4867 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4870 bool clear_this = true;
4872 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4874 /* We don't expect more than one element of the union to be
4875 initialized. Not sure what we should do otherwise... */
4876 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4879 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4880 CONSTRUCTOR_ELTS (ctor),
4883 /* ??? We could look at each element of the union, and find the
4884 largest element. Which would avoid comparing the size of the
4885 initialized element against any tail padding in the union.
4886 Doesn't seem worth the effort... */
4887 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4888 TYPE_SIZE (init_sub_type)) == 1)
4890 /* And now we have to find out if the element itself is fully
4891 constructed. E.g. for union { struct { int a, b; } s; } u
4892 = { .s = { .a = 1 } }. */
4893 if (elt_count == count_type_elements (init_sub_type, false))
4894 clear_this = false;
4898 *p_must_clear = clear_this;
4901 *p_nz_elts += nz_elts;
4902 *p_elt_count += elt_count;
4904 return const_p;
4907 /* Examine CTOR to discover:
4908 * how many scalar fields are set to nonzero values,
4909 and place it in *P_NZ_ELTS;
4910 * how many scalar fields in total are in CTOR,
4911 and place it in *P_ELT_COUNT.
4912 * if a type is a union, and the initializer from the constructor
4913 is not the largest element in the union, then set *p_must_clear.
4915 Return whether or not CTOR is a valid static constant initializer, the same
4916 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4919 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4920 HOST_WIDE_INT *p_elt_count,
4923 *p_nz_elts = 0;
4924 *p_elt_count = 0;
4925 *p_must_clear = false;
4927 return
4928 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4931 /* Count the number of scalars in TYPE.  Return -1 on overflow or
4932 if TYPE is variable-sized.  If ALLOW_FLEXARR is true, don't count
4933 a flexible array member at the end of the structure.  */
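/* For example (illustrative only):

       struct S { int a; int b; };     -- 2 scalars
       struct S v[3];                  -- 6 scalars
       struct F { int n; int d[]; };   -- 1 scalar if ALLOW_FLEXARR,
                                          -1 otherwise  */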
4936 count_type_elements (const_tree type, bool allow_flexarr)
4938 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4939 switch (TREE_CODE (type))
4943 tree telts = array_type_nelts (type);
4944 if (telts && host_integerp (telts, 1))
4946 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4947 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4948 if (n == 0)
4949 return 0;
4950 else if (max / n > m)
4951 return n * m;
4953 return -1;
4958 HOST_WIDE_INT n = 0, t;
4961 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4962 if (TREE_CODE (f) == FIELD_DECL)
4964 t = count_type_elements (TREE_TYPE (f), false);
4965 if (t < 0)
4967 /* Check for structures with flexible array member. */
4968 tree tf = TREE_TYPE (f);
4969 if (allow_flexarr
4970 && TREE_CHAIN (f) == NULL
4971 && TREE_CODE (tf) == ARRAY_TYPE
4972 && TYPE_DOMAIN (tf)
4973 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4974 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4975 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4976 && int_size_in_bytes (type) >= 0)
4977 break;
4979 return -1;
4981 n += t;
4984 return n;
4988 case QUAL_UNION_TYPE:
4989 return -1;
4991 case COMPLEX_TYPE:
4992 return 2;
4994 case VECTOR_TYPE:
4995 return TYPE_VECTOR_SUBPARTS (type);
4999 case FIXED_POINT_TYPE:
5004 case REFERENCE_TYPE:
5005 return 1;
5019 /* Return 1 if EXP contains mostly (3/4) zeros. */
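/* E.g. (illustrative only) "int a[8] = { 5 };" is mostly zeros: one
   nonzero element against the eight scalars of the type.  */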
5022 mostly_zeros_p (const_tree exp)
5024 if (TREE_CODE (exp) == CONSTRUCTOR)
5027 HOST_WIDE_INT nz_elts, count, elts;
5030 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5031 if (must_clear)
5032 return 1;
5034 elts = count_type_elements (TREE_TYPE (exp), false);
5036 return nz_elts < elts / 4;
5039 return initializer_zerop (exp);
5042 /* Return 1 if EXP contains all zeros. */
5045 all_zeros_p (const_tree exp)
5047 if (TREE_CODE (exp) == CONSTRUCTOR)
5050 HOST_WIDE_INT nz_elts, count;
5053 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5054 return nz_elts == 0;
5057 return initializer_zerop (exp);
5060 /* Helper function for store_constructor.
5061 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5062 TYPE is the type of the CONSTRUCTOR, not the element type.
5063 CLEARED is as for store_constructor.
5064 ALIAS_SET is the alias set to use for any stores.
5066 This provides a recursive shortcut back to store_constructor when it isn't
5067 necessary to go through store_field. This is so that we can pass through
5068 the cleared field to let store_constructor know that we may not have to
5069 clear a substructure if the outer structure has already been cleared. */
5072 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5073 HOST_WIDE_INT bitpos, enum machine_mode mode,
5074 tree exp, tree type, int cleared,
5075 alias_set_type alias_set)
5077 if (TREE_CODE (exp) == CONSTRUCTOR
5078 /* We can only call store_constructor recursively if the size and
5079 bit position are on a byte boundary. */
5080 && bitpos % BITS_PER_UNIT == 0
5081 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5082 /* If we have a nonzero bitpos for a register target, then we just
5083 let store_field do the bitfield handling. This is unlikely to
5084 generate unnecessary clear instructions anyways. */
5085 && (bitpos == 0 || MEM_P (target)))
5089 = adjust_address (target,
5090 GET_MODE (target) == BLKmode
5092 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5093 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5096 /* Update the alias set, if required. */
5097 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5098 && MEM_ALIAS_SET (target) != 0)
5100 target = copy_rtx (target);
5101 set_mem_alias_set (target, alias_set);
5104 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5107 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5110 /* Store the value of constructor EXP into the rtx TARGET.
5111 TARGET is either a REG or a MEM; we know it cannot conflict, since
5112 safe_from_p has been called.
5113 CLEARED is true if TARGET is known to have been zero'd.
5114 SIZE is the number of bytes of TARGET we are allowed to modify: this
5115 may not be the same as the size of EXP if we are assigning to a field
5116 which has been packed to exclude padding bits. */
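/* For example (illustrative only), for

       struct S { int a, b, c, d; } s = { 1 };

   the constructor has fewer elements than the type has fields, so the
   whole object is cleared first and only field A is stored afterwards.  */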
5119 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5121 tree type = TREE_TYPE (exp);
5122 #ifdef WORD_REGISTER_OPERATIONS
5123 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5126 switch (TREE_CODE (type))
5130 case QUAL_UNION_TYPE:
5132 unsigned HOST_WIDE_INT idx;
5135 /* If size is zero or the target is already cleared, do nothing. */
5136 if (size == 0 || cleared)
5137 cleared = 1;
5138 /* We either clear the aggregate or indicate the value is dead. */
5139 else if ((TREE_CODE (type) == UNION_TYPE
5140 || TREE_CODE (type) == QUAL_UNION_TYPE)
5141 && ! CONSTRUCTOR_ELTS (exp))
5142 /* If the constructor is empty, clear the union. */
5144 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5148 /* If we are building a static constructor into a register,
5149 set the initial value as zero so we can fold the value into
5150 a constant. But if more than one register is involved,
5151 this probably loses. */
5152 else if (REG_P (target) && TREE_STATIC (exp)
5153 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5155 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5159 /* If the constructor has fewer fields than the structure or
5160 if we are initializing the structure to mostly zeros, clear
5161 the whole structure first. Don't do this if TARGET is a
5162 register whose mode size isn't equal to SIZE since
5163 clear_storage can't handle this case. */
5165 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5166 != fields_length (type))
5167 || mostly_zeros_p (exp))
5169 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5172 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5176 if (REG_P (target) && !cleared)
5177 emit_clobber (target);
5179 /* Store each element of the constructor into the
5180 corresponding field of TARGET. */
5181 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5183 enum machine_mode mode;
5184 HOST_WIDE_INT bitsize;
5185 HOST_WIDE_INT bitpos = 0;
5187 rtx to_rtx = target;
5189 /* Just ignore missing fields. We cleared the whole
5190 structure, above, if any fields are missing. */
5194 if (cleared && initializer_zerop (value))
5195 continue;
5197 if (host_integerp (DECL_SIZE (field), 1))
5198 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5202 mode = DECL_MODE (field);
5203 if (DECL_BIT_FIELD (field))
5206 offset = DECL_FIELD_OFFSET (field);
5207 if (host_integerp (offset, 0)
5208 && host_integerp (bit_position (field), 0))
5210 bitpos = int_bit_position (field);
5214 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5221 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5222 make_tree (TREE_TYPE (exp),
5225 offset_rtx = expand_normal (offset);
5226 gcc_assert (MEM_P (to_rtx));
5228 #ifdef POINTERS_EXTEND_UNSIGNED
5229 if (GET_MODE (offset_rtx) != Pmode)
5230 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5232 if (GET_MODE (offset_rtx) != ptr_mode)
5233 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5236 to_rtx = offset_address (to_rtx, offset_rtx,
5237 highest_pow2_factor (offset));
5240 #ifdef WORD_REGISTER_OPERATIONS
5241 /* If this initializes a field that is smaller than a
5242 word, at the start of a word, try to widen it to a full
5243 word. This special case allows us to output C++ member
5244 function initializations in a form that the optimizers
5247 && bitsize < BITS_PER_WORD
5248 && bitpos % BITS_PER_WORD == 0
5249 && GET_MODE_CLASS (mode) == MODE_INT
5250 && TREE_CODE (value) == INTEGER_CST
5252 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5254 tree type = TREE_TYPE (value);
5256 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5258 type = lang_hooks.types.type_for_size
5259 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5260 value = fold_convert (type, value);
5263 if (BYTES_BIG_ENDIAN)
5265 = fold_build2 (LSHIFT_EXPR, type, value,
5266 build_int_cst (type,
5267 BITS_PER_WORD - bitsize));
5268 bitsize = BITS_PER_WORD;
5273 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5274 && DECL_NONADDRESSABLE_P (field))
5276 to_rtx = copy_rtx (to_rtx);
5277 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5280 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5281 value, type, cleared,
5282 get_alias_set (TREE_TYPE (field)));
5289 unsigned HOST_WIDE_INT i;
5292 tree elttype = TREE_TYPE (type);
5294 HOST_WIDE_INT minelt = 0;
5295 HOST_WIDE_INT maxelt = 0;
5297 domain = TYPE_DOMAIN (type);
5298 const_bounds_p = (TYPE_MIN_VALUE (domain)
5299 && TYPE_MAX_VALUE (domain)
5300 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5301 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5303 /* If we have constant bounds for the range of the type, get them. */
5306 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5307 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5310 /* If the constructor has fewer elements than the array, clear
5311 the whole array first. Similarly if this is static
5312 constructor of a non-BLKmode object. */
5315 else if (REG_P (target) && TREE_STATIC (exp))
5319 unsigned HOST_WIDE_INT idx;
5321 HOST_WIDE_INT count = 0, zero_count = 0;
5322 need_to_clear = ! const_bounds_p;
5324 /* This loop is a more accurate version of the loop in
5325 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5326 is also needed to check for missing elements. */
5327 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5329 HOST_WIDE_INT this_node_count;
5334 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5336 tree lo_index = TREE_OPERAND (index, 0);
5337 tree hi_index = TREE_OPERAND (index, 1);
5339 if (! host_integerp (lo_index, 1)
5340 || ! host_integerp (hi_index, 1))
5346 this_node_count = (tree_low_cst (hi_index, 1)
5347 - tree_low_cst (lo_index, 1) + 1);
5350 this_node_count = 1;
5352 count += this_node_count;
5353 if (mostly_zeros_p (value))
5354 zero_count += this_node_count;
5357 /* Clear the entire array first if there are any missing
5358 elements, or if the incidence of zero elements is >=
5359 75%.  */
5360 if (! need_to_clear
5361 && (count < maxelt - minelt + 1
5362 || 4 * zero_count >= 3 * count))
5363 need_to_clear = 1;
5366 if (need_to_clear && size > 0)
5368 if (REG_P (target))
5369 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5370 else
5371 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5372 cleared = 1;
5375 if (!cleared && REG_P (target))
5376 /* Inform later passes that the old value is dead. */
5377 emit_clobber (target);
5379 /* Store each element of the constructor into the
5380 corresponding element of TARGET, determined by counting the
5382 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5384 enum machine_mode mode;
5385 HOST_WIDE_INT bitsize;
5386 HOST_WIDE_INT bitpos;
5388 rtx xtarget = target;
5390 if (cleared && initializer_zerop (value))
5391 continue;
5393 unsignedp = TYPE_UNSIGNED (elttype);
5394 mode = TYPE_MODE (elttype);
5395 if (mode == BLKmode)
5396 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5397 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5400 bitsize = GET_MODE_BITSIZE (mode);
5402 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5404 tree lo_index = TREE_OPERAND (index, 0);
5405 tree hi_index = TREE_OPERAND (index, 1);
5406 rtx index_r, pos_rtx;
5407 HOST_WIDE_INT lo, hi, count;
5410 /* If the range is constant and "small", unroll the loop. */
5412 && host_integerp (lo_index, 0)
5413 && host_integerp (hi_index, 0)
5414 && (lo = tree_low_cst (lo_index, 0),
5415 hi = tree_low_cst (hi_index, 0),
5416 count = hi - lo + 1,
5419 || (host_integerp (TYPE_SIZE (elttype), 1)
5420 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5423 lo -= minelt; hi -= minelt;
5424 for (; lo <= hi; lo++)
5426 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5429 && !MEM_KEEP_ALIAS_SET_P (target)
5430 && TREE_CODE (type) == ARRAY_TYPE
5431 && TYPE_NONALIASED_COMPONENT (type))
5433 target = copy_rtx (target);
5434 MEM_KEEP_ALIAS_SET_P (target) = 1;
5437 store_constructor_field
5438 (target, bitsize, bitpos, mode, value, type, cleared,
5439 get_alias_set (elttype));
5444 rtx loop_start = gen_label_rtx ();
5445 rtx loop_end = gen_label_rtx ();
5448 expand_normal (hi_index);
5449 unsignedp = TYPE_UNSIGNED (domain);
5451 index = build_decl (VAR_DECL, NULL_TREE, domain);
5454 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5456 SET_DECL_RTL (index, index_r);
5457 store_expr (lo_index, index_r, 0, false);
5459 /* Build the head of the loop. */
5460 do_pending_stack_adjust ();
5461 emit_label (loop_start);
5463 /* Assign value to element index. */
5464 position =
5465 fold_convert (ssizetype,
5466 fold_build2 (MINUS_EXPR,
5469 TYPE_MIN_VALUE (domain)));
5471 position =
5472 size_binop (MULT_EXPR, position,
5473 fold_convert (ssizetype,
5474 TYPE_SIZE_UNIT (elttype)));
5476 pos_rtx = expand_normal (position);
5477 xtarget = offset_address (target, pos_rtx,
5478 highest_pow2_factor (position));
5479 xtarget = adjust_address (xtarget, mode, 0);
5480 if (TREE_CODE (value) == CONSTRUCTOR)
5481 store_constructor (value, xtarget, cleared,
5482 bitsize / BITS_PER_UNIT);
5484 store_expr (value, xtarget, 0, false);
5486 /* Generate a conditional jump to exit the loop. */
5487 exit_cond = build2 (LT_EXPR, integer_type_node,
5488 index, hi_index);
5489 jumpif (exit_cond, loop_end);
5491 /* Update the loop counter, and jump to the head of
5493 expand_assignment (index,
5494 build2 (PLUS_EXPR, TREE_TYPE (index),
5495 index, integer_one_node),
5496 false);
5498 emit_jump (loop_start);
5500 /* Build the end of the loop. */
5501 emit_label (loop_end);
5504 else if ((index != 0 && ! host_integerp (index, 0))
5505 || ! host_integerp (TYPE_SIZE (elttype), 1))
5509 if (index == 0)
5510 index = ssize_int (1);
5512 if (minelt)
5513 index = fold_convert (ssizetype,
5514 fold_build2 (MINUS_EXPR,
5517 TYPE_MIN_VALUE (domain)));
5519 position =
5520 size_binop (MULT_EXPR, index,
5521 fold_convert (ssizetype,
5522 TYPE_SIZE_UNIT (elttype)));
5523 xtarget = offset_address (target,
5524 expand_normal (position),
5525 highest_pow2_factor (position));
5526 xtarget = adjust_address (xtarget, mode, 0);
5527 store_expr (value, xtarget, 0, false);
5532 bitpos = ((tree_low_cst (index, 0) - minelt)
5533 * tree_low_cst (TYPE_SIZE (elttype), 1));
5535 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5537 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5538 && TREE_CODE (type) == ARRAY_TYPE
5539 && TYPE_NONALIASED_COMPONENT (type))
5541 target = copy_rtx (target);
5542 MEM_KEEP_ALIAS_SET_P (target) = 1;
5544 store_constructor_field (target, bitsize, bitpos, mode, value,
5545 type, cleared, get_alias_set (elttype));
5553 unsigned HOST_WIDE_INT idx;
5554 constructor_elt *ce;
5558 tree elttype = TREE_TYPE (type);
5559 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5560 enum machine_mode eltmode = TYPE_MODE (elttype);
5561 HOST_WIDE_INT bitsize;
5562 HOST_WIDE_INT bitpos;
5563 rtvec vector = NULL;
5565 alias_set_type alias;
5567 gcc_assert (eltmode != BLKmode);
5569 n_elts = TYPE_VECTOR_SUBPARTS (type);
5570 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5572 enum machine_mode mode = GET_MODE (target);
5574 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5575 if (icode != CODE_FOR_nothing)
5579 vector = rtvec_alloc (n_elts);
5580 for (i = 0; i < n_elts; i++)
5581 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5585 /* If the constructor has fewer elements than the vector,
5586 clear the whole array first. Similarly if this is static
5587 constructor of a non-BLKmode object. */
5590 else if (REG_P (target) && TREE_STATIC (exp))
5594 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5597 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5599 int n_elts_here = tree_low_cst
5600 (int_const_binop (TRUNC_DIV_EXPR,
5601 TYPE_SIZE (TREE_TYPE (value)),
5602 TYPE_SIZE (elttype), 0), 1);
5604 count += n_elts_here;
5605 if (mostly_zeros_p (value))
5606 zero_count += n_elts_here;
5609 /* Clear the entire vector first if there are any missing elements,
5610 or if the incidence of zero elements is >= 75%. */
5611 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5614 if (need_to_clear && size > 0 && !vector)
5616 if (REG_P (target))
5617 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5618 else
5619 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5620 cleared = 1;
5623 /* Inform later passes that the old value is dead. */
5624 if (!cleared && !vector && REG_P (target))
5625 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5628 alias = MEM_ALIAS_SET (target);
5630 alias = get_alias_set (elttype);
5632 /* Store each element of the constructor into the corresponding
5633 element of TARGET, determined by counting the elements. */
5634 for (idx = 0, i = 0;
5635 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5636 idx++, i += bitsize / elt_size)
5638 HOST_WIDE_INT eltpos;
5639 tree value = ce->value;
5641 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5642 if (cleared && initializer_zerop (value))
5643 continue;
5645 if (ce->index)
5646 eltpos = tree_low_cst (ce->index, 1);
5647 else
5648 eltpos = i;
5652 /* Vector CONSTRUCTORs should only be built from smaller
5653 vectors in the case of BLKmode vectors. */
5654 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5655 RTVEC_ELT (vector, eltpos)
5656 = expand_normal (value);
5660 enum machine_mode value_mode =
5661 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5662 ? TYPE_MODE (TREE_TYPE (value))
5664 bitpos = eltpos * elt_size;
5665 store_constructor_field (target, bitsize, bitpos,
5666 value_mode, value, type,
5672 emit_insn (GEN_FCN (icode)
5674 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5683 /* Store the value of EXP (an expression tree)
5684 into a subfield of TARGET which has mode MODE and occupies
5685 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5686 If MODE is VOIDmode, it means that we are storing into a bit-field.
5688 Always return const0_rtx unless we have something particular to
5691 TYPE is the type of the underlying object,
5693 ALIAS_SET is the alias set for the destination. This value will
5694 (in general) be different from that for TARGET, since TARGET is a
5695 reference to the containing structure.
5697 If NONTEMPORAL is true, try generating a nontemporal store. */
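/* Illustrative example (not from the original source): storing to s.f
   for "struct S { int f : 5; } s;" arrives here with BITSIZE == 5 and
   MODE == VOIDmode, and is handled by the store_bit_field path below.  */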
5700 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5701 enum machine_mode mode, tree exp, tree type,
5702 alias_set_type alias_set, bool nontemporal)
5704 HOST_WIDE_INT width_mask = 0;
5706 if (TREE_CODE (exp) == ERROR_MARK)
5709 /* If we have nothing to store, do nothing unless the expression has
5710 side-effects.  */
5711 if (bitsize == 0)
5712 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5713 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5714 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5716 /* If we are storing into an unaligned field of an aligned union that is
5717 in a register, we may have the mode of TARGET being an integer mode but
5718 MODE == BLKmode. In that case, get an aligned object whose size and
5719 alignment are the same as TARGET and store TARGET into it (we can avoid
5720 the store if the field being stored is the entire width of TARGET). Then
5721 call ourselves recursively to store the field into a BLKmode version of
5722 that object. Finally, load from the object into TARGET. This is not
5723 very efficient in general, but should only be slightly more expensive
5724 than the otherwise-required unaligned accesses. Perhaps this can be
5725 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5726 twice, once with emit_move_insn and once via store_field. */
5728 if (mode == BLKmode
5729 && (REG_P (target) || GET_CODE (target) == SUBREG))
5731 rtx object = assign_temp (type, 0, 1, 1);
5732 rtx blk_object = adjust_address (object, BLKmode, 0);
5734 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5735 emit_move_insn (object, target);
5737 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5740 emit_move_insn (target, object);
5742 /* We want to return the BLKmode version of the data.  */
5743 return blk_object;
5746 if (GET_CODE (target) == CONCAT)
5748 /* We're storing into a struct containing a single __complex. */
5750 gcc_assert (!bitpos);
5751 return store_expr (exp, target, 0, nontemporal);
5754 /* If the structure is in a register or if the component
5755 is a bit field, we cannot use addressing to access it.
5756 Use bit-field techniques or SUBREG to store in it. */
5758 if (mode == VOIDmode
5759 || (mode != BLKmode && ! direct_store[(int) mode]
5760 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5761 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5763 || GET_CODE (target) == SUBREG
5764 /* If the field isn't aligned enough to store as an ordinary memref,
5765 store it as a bit field. */
5767 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5768 || bitpos % GET_MODE_ALIGNMENT (mode))
5769 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5770 || (bitpos % BITS_PER_UNIT != 0)))
5771 /* If the RHS and field are a constant size and the size of the
5772 RHS isn't the same size as the bitfield, we must use bitfield
5775 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5776 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5780 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5781 implies a mask operation. If the precision is the same size as
5782 the field we're storing into, that mask is redundant. This is
5783 particularly common with bit field assignments generated by the
5785 if (TREE_CODE (exp) == NOP_EXPR)
5787 tree type = TREE_TYPE (exp);
5788 if (INTEGRAL_TYPE_P (type)
5789 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5790 && bitsize == TYPE_PRECISION (type))
5792 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5793 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5794 exp = TREE_OPERAND (exp, 0);
5798 temp = expand_normal (exp);
5800 /* If BITSIZE is narrower than the size of the type of EXP
5801 we will be narrowing TEMP. Normally, what's wanted are the
5802 low-order bits. However, if EXP's type is a record and this is
5803 big-endian machine, we want the upper BITSIZE bits. */
5804 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5805 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5806 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5807 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5808 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5809 - bitsize),
5810 NULL_RTX, 1);
5812 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5813 MODE.  */
5814 if (mode != VOIDmode && mode != BLKmode
5815 && mode != TYPE_MODE (TREE_TYPE (exp)))
5816 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5818 /* If the modes of TEMP and TARGET are both BLKmode, both
5819 must be in memory and BITPOS must be aligned on a byte
5820 boundary. If so, we simply do a block copy. Likewise
5821 for a BLKmode-like TARGET. */
5822 if (GET_MODE (temp) == BLKmode
5823 && (GET_MODE (target) == BLKmode
5825 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5826 && (bitpos % BITS_PER_UNIT) == 0
5827 && (bitsize % BITS_PER_UNIT) == 0)))
5829 gcc_assert (MEM_P (target) && MEM_P (temp)
5830 && (bitpos % BITS_PER_UNIT) == 0);
5832 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5833 emit_block_move (target, temp,
5834 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5835 / BITS_PER_UNIT),
5836 BLOCK_OP_NORMAL);
5838 return const0_rtx;
5841 /* Store the value in the bitfield. */
5842 store_bit_field (target, bitsize, bitpos, mode, temp);
5844 return const0_rtx;
5846 else
5848 /* Now build a reference to just the desired component. */
5849 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5851 if (to_rtx == target)
5852 to_rtx = copy_rtx (to_rtx);
5854 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5855 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5856 set_mem_alias_set (to_rtx, alias_set);
5858 return store_expr (exp, to_rtx, 0, nontemporal);
5862 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5863 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5864 codes and find the ultimate containing object, which we return.
5866 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5867 bit position, and *PUNSIGNEDP to the signedness of the field.
5868 If the position of the field is variable, we store a tree
5869 giving the variable offset (in units) in *POFFSET.
5870 This offset is in addition to the bit position.
5871 If the position is not variable, we store 0 in *POFFSET.
5873 If any of the extraction expressions is volatile,
5874 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5876 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5877 Otherwise, it is a mode that can be used to access the field.
5879 If the field describes a variable-sized object, *PMODE is set to
5880 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5881 this case, but the address of the object can be found.
5883 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5884 look through nodes that serve as markers of a greater alignment than
5885 the one that can be deduced from the expression. These nodes make it
5886 possible for front-ends to prevent temporaries from being created by
5887 the middle-end on alignment considerations. For that purpose, the
5888 normal operating mode at high-level is to always pass FALSE so that
5889 the ultimate containing object is really returned; moreover, the
5890 associated predicate handled_component_p will always return TRUE
5891 on these nodes, thus indicating that they are essentially handled
5892 by get_inner_reference. TRUE should only be passed when the caller
5893 is scanning the expression in order to build another representation
5894 and specifically knows how to handle these nodes; as such, this is
5895 the normal operating mode in the RTL expanders. */
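/* For example (illustrative only, assuming a typical layout), for

       struct S { int a; int b : 3; int c : 5; } s;

   a call on "s.c" returns the object S with *PBITSIZE == 5,
   *PBITPOS == 35, *POFFSET == NULL_TREE and *PMODE == VOIDmode,
   since C is a bit-field.  */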
5898 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5899 HOST_WIDE_INT *pbitpos, tree *poffset,
5900 enum machine_mode *pmode, int *punsignedp,
5901 int *pvolatilep, bool keep_aligning)
5904 enum machine_mode mode = VOIDmode;
5905 bool blkmode_bitfield = false;
5906 tree offset = size_zero_node;
5907 tree bit_offset = bitsize_zero_node;
5909 /* First get the mode, signedness, and size. We do this from just the
5910 outermost expression. */
5911 if (TREE_CODE (exp) == COMPONENT_REF)
5913 tree field = TREE_OPERAND (exp, 1);
5914 size_tree = DECL_SIZE (field);
5915 if (!DECL_BIT_FIELD (field))
5916 mode = DECL_MODE (field);
5917 else if (DECL_MODE (field) == BLKmode)
5918 blkmode_bitfield = true;
5920 *punsignedp = DECL_UNSIGNED (field);
5922 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5924 size_tree = TREE_OPERAND (exp, 1);
5925 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5926 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5928 /* For vector types, with the correct size of access, use the mode of
5930 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5931 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5932 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5933 mode = TYPE_MODE (TREE_TYPE (exp));
5937 mode = TYPE_MODE (TREE_TYPE (exp));
5938 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5940 if (mode == BLKmode)
5941 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5943 *pbitsize = GET_MODE_BITSIZE (mode);
5948 if (! host_integerp (size_tree, 1))
5949 mode = BLKmode, *pbitsize = -1;
5950 else
5951 *pbitsize = tree_low_cst (size_tree, 1);
5954 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5955 and find the ultimate containing object. */
5958 switch (TREE_CODE (exp))
5961 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5962 TREE_OPERAND (exp, 2));
5967 tree field = TREE_OPERAND (exp, 1);
5968 tree this_offset = component_ref_field_offset (exp);
5970 /* If this field hasn't been filled in yet, don't go past it.
5971 This should only happen when folding expressions made during
5972 type construction. */
5973 if (this_offset == 0)
5976 offset = size_binop (PLUS_EXPR, offset, this_offset);
5977 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5978 DECL_FIELD_BIT_OFFSET (field));
5980 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5985 case ARRAY_RANGE_REF:
5987 tree index = TREE_OPERAND (exp, 1);
5988 tree low_bound = array_ref_low_bound (exp);
5989 tree unit_size = array_ref_element_size (exp);
5991 /* We assume all arrays have sizes that are a multiple of a byte.
5992 First subtract the lower bound, if any, in the type of the
5993 index, then convert to sizetype and multiply by the size of
5994 the array element. */
5995 if (! integer_zerop (low_bound))
5996 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5999 offset = size_binop (PLUS_EXPR, offset,
6000 size_binop (MULT_EXPR,
6001 fold_convert (sizetype, index),
6010 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6011 bitsize_int (*pbitsize));
6014 case VIEW_CONVERT_EXPR:
6015 if (keep_aligning && STRICT_ALIGNMENT
6016 && (TYPE_ALIGN (TREE_TYPE (exp))
6017 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6018 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6019 < BIGGEST_ALIGNMENT)
6020 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6021 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6029 /* If any reference in the chain is volatile, the effect is volatile. */
6030 if (TREE_THIS_VOLATILE (exp))
6033 exp = TREE_OPERAND (exp, 0);
6037 /* If OFFSET is constant, see if we can return the whole thing as a
6038 constant bit position. Make sure to handle overflow during
6040 if (host_integerp (offset, 0))
6042 double_int tem = double_int_mul (tree_to_double_int (offset),
6043 uhwi_to_double_int (BITS_PER_UNIT));
6044 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6045 if (double_int_fits_in_shwi_p (tem))
6047 *pbitpos = double_int_to_shwi (tem);
6048 *poffset = offset = NULL_TREE;
6052 /* Otherwise, split it up.  */
6053 if (offset)
6054 {
6055 *pbitpos = tree_low_cst (bit_offset, 0);
6056 *poffset = offset;
6057 }
6059 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6060 if (mode == VOIDmode
6061 && blkmode_bitfield
6062 && (*pbitpos % BITS_PER_UNIT) == 0
6063 && (*pbitsize % BITS_PER_UNIT) == 0)
6064 *pmode = BLKmode;
6065 else
6066 *pmode = mode;
6068 return exp;
6071 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6072 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6073 EXP is marked as PACKED. */
6076 contains_packed_reference (const_tree exp)
6078 bool packed_p = false;
6082 switch (TREE_CODE (exp))
6086 tree field = TREE_OPERAND (exp, 1);
6087 packed_p = DECL_PACKED (field)
6088 || TYPE_PACKED (TREE_TYPE (field))
6089 || TYPE_PACKED (TREE_TYPE (exp));
6097 case ARRAY_RANGE_REF:
6100 case VIEW_CONVERT_EXPR:
6106 exp = TREE_OPERAND (exp, 0);
6112 /* Return a tree of sizetype representing the size, in bytes, of the element
6113 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6116 array_ref_element_size (tree exp)
6118 tree aligned_size = TREE_OPERAND (exp, 3);
6119 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6121 /* If a size was specified in the ARRAY_REF, it's the size measured
6122 in alignment units of the element type. So multiply by that value. */
6125 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6126 sizetype from another type of the same width and signedness. */
6127 if (TREE_TYPE (aligned_size) != sizetype)
6128 aligned_size = fold_convert (sizetype, aligned_size);
6129 return size_binop (MULT_EXPR, aligned_size,
6130 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6133 /* Otherwise, take the size from that of the element type. Substitute
6134 any PLACEHOLDER_EXPR that we have. */
6136 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6139 /* Return a tree representing the lower bound of the array mentioned in
6140 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6143 array_ref_low_bound (tree exp)
6145 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6147 /* If a lower bound is specified in EXP, use it. */
6148 if (TREE_OPERAND (exp, 2))
6149 return TREE_OPERAND (exp, 2);
6151 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6152 substituting for a PLACEHOLDER_EXPR as needed. */
6153 if (domain_type && TYPE_MIN_VALUE (domain_type))
6154 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6156 /* Otherwise, return a zero of the appropriate type. */
6157 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6160 /* Return a tree representing the upper bound of the array mentioned in
6161 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6164 array_ref_up_bound (tree exp)
6166 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6168 /* If there is a domain type and it has an upper bound, use it, substituting
6169 for a PLACEHOLDER_EXPR as needed. */
6170 if (domain_type && TYPE_MAX_VALUE (domain_type))
6171 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6173 /* Otherwise fail. */
6177 /* Return a tree representing the offset, in bytes, of the field referenced
6178 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6181 component_ref_field_offset (tree exp)
6183 tree aligned_offset = TREE_OPERAND (exp, 2);
6184 tree field = TREE_OPERAND (exp, 1);
6186 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6187 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6191 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6192 sizetype from another type of the same width and signedness. */
6193 if (TREE_TYPE (aligned_offset) != sizetype)
6194 aligned_offset = fold_convert (sizetype, aligned_offset);
6195 return size_binop (MULT_EXPR, aligned_offset,
6196 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6199 /* Otherwise, take the offset from that of the field. Substitute
6200 any PLACEHOLDER_EXPR that we have. */
6202 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6205 /* Given an rtx VALUE that may contain additions and multiplications, return
6206 an equivalent value that just refers to a register, memory, or constant.
6207 This is done by generating instructions to perform the arithmetic and
6208 returning a pseudo-register containing the value.
6210 The returned value may be a REG, SUBREG, MEM or constant. */
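/* Illustrative example: applied to the rtx

       (plus:SI (reg:SI 100) (const_int 4))

   force_operand emits the addition into a pseudo (or TARGET) and
   returns that register, so the caller sees a flat operand.  */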
6213 force_operand (rtx value, rtx target)
6216 /* Use subtarget as the target for operand 0 of a binary operation. */
6217 rtx subtarget = get_subtarget (target);
6218 enum rtx_code code = GET_CODE (value);
6220 /* Check for subreg applied to an expression produced by the loop optimizer.  */
6221 if (code == SUBREG
6222 && !REG_P (SUBREG_REG (value))
6223 && !MEM_P (SUBREG_REG (value)))
6225 value
6226 = simplify_gen_subreg (GET_MODE (value),
6227 force_reg (GET_MODE (SUBREG_REG (value)),
6228 force_operand (SUBREG_REG (value),
6230 GET_MODE (SUBREG_REG (value)),
6231 SUBREG_BYTE (value));
6232 code = GET_CODE (value);
6235 /* Check for a PIC address load. */
6236 if ((code == PLUS || code == MINUS)
6237 && XEXP (value, 0) == pic_offset_table_rtx
6238 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6239 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6240 || GET_CODE (XEXP (value, 1)) == CONST))
6243 subtarget = gen_reg_rtx (GET_MODE (value));
6244 emit_move_insn (subtarget, value);
6248 if (ARITHMETIC_P (value))
6250 op2 = XEXP (value, 1);
6251 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6253 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6256 op2 = negate_rtx (GET_MODE (value), op2);
6259 /* Check for an addition with OP2 a constant integer and our first
6260 operand a PLUS of a virtual register and something else. In that
6261 case, we want to emit the sum of the virtual register and the
6262 constant first and then add the other value. This allows virtual
6263 register instantiation to simply modify the constant rather than
6264 creating another one around this addition. */
6265 if (code == PLUS && GET_CODE (op2) == CONST_INT
6266 && GET_CODE (XEXP (value, 0)) == PLUS
6267 && REG_P (XEXP (XEXP (value, 0), 0))
6268 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6269 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6271 rtx temp = expand_simple_binop (GET_MODE (value), code,
6272 XEXP (XEXP (value, 0), 0), op2,
6273 subtarget, 0, OPTAB_LIB_WIDEN);
6274 return expand_simple_binop (GET_MODE (value), code, temp,
6275 force_operand (XEXP (XEXP (value,
6277 target, 0, OPTAB_LIB_WIDEN);
6280 op1 = force_operand (XEXP (value, 0), subtarget);
6281 op2 = force_operand (op2, NULL_RTX);
6285 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6287 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6288 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6289 target, 1, OPTAB_LIB_WIDEN);
6291 return expand_divmod (0,
6292 FLOAT_MODE_P (GET_MODE (value))
6293 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6294 GET_MODE (value), op1, op2, target, 0);
6296 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6299 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6302 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6305 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6306 target, 0, OPTAB_LIB_WIDEN);
6308 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6309 target, 1, OPTAB_LIB_WIDEN);
6312 if (UNARY_P (value))
6315 target = gen_reg_rtx (GET_MODE (value));
6316 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6323 case FLOAT_TRUNCATE:
6324 convert_move (target, op1, code == ZERO_EXTEND);
6329 expand_fix (target, op1, code == UNSIGNED_FIX);
6333 case UNSIGNED_FLOAT:
6334 expand_float (target, op1, code == UNSIGNED_FLOAT);
6338 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6342 #ifdef INSN_SCHEDULING
6343 /* On machines that have insn scheduling, we want all memory references to be
6344 explicit, so we need to deal with such paradoxical SUBREGs. */
6345 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6346 && (GET_MODE_SIZE (GET_MODE (value))
6347 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6349 = simplify_gen_subreg (GET_MODE (value),
6350 force_reg (GET_MODE (SUBREG_REG (value)),
6351 force_operand (SUBREG_REG (value),
6353 GET_MODE (SUBREG_REG (value)),
6354 SUBREG_BYTE (value));
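/* Hedged usage sketch, not in the original file (the function name is
   hypothetical): a caller legitimizing an address of the form
   base + index * 4 before using it as an insn operand. */
#if 0
static rtx
example_force_scaled_address (rtx base, rtx index)
{
  rtx addr = gen_rtx_PLUS (Pmode, base,
			   gen_rtx_MULT (Pmode, index, GEN_INT (4)));

  /* force_operand emits the multiply and add as real insns and hands
     back a REG (or something simpler) that any insn can accept. */
  return force_operand (addr, NULL_RTX);
}
#endif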
6360 /* Subroutine of expand_expr: return nonzero iff there is no way that
6361 EXP can reference X, which is being modified. TOP_P is nonzero if this
6362 call is going to be used to determine whether we need a temporary
6363 for EXP, as opposed to a recursive call to this function.
6365 It is always safe for this routine to return zero since it merely
6366 searches for optimization opportunities. */
6369 safe_from_p (const_rtx x, tree exp, int top_p)
6375 /* If EXP has varying size, we MUST use a target since we currently
6376 have no way of allocating temporaries of variable size
6377 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6378 So we assume here that something at a higher level has prevented a
6379 clash. This is somewhat bogus, but the best we can do. Only
6380 do this when X is BLKmode and when we are at the top level. */
6381 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6382 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6383 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6384 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6385 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6387 && GET_MODE (x) == BLKmode)
6388 /* If X is in the outgoing argument area, it is always safe. */
6390 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6391 || (GET_CODE (XEXP (x, 0)) == PLUS
6392 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6395 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6396 find the underlying pseudo. */
6397 if (GET_CODE (x) == SUBREG)
6400 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6404 /* Now look at our tree code and possibly recurse. */
6405 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6407 case tcc_declaration:
6408 exp_rtl = DECL_RTL_IF_SET (exp);
6414 case tcc_exceptional:
6415 if (TREE_CODE (exp) == TREE_LIST)
6419 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6421 exp = TREE_CHAIN (exp);
6424 if (TREE_CODE (exp) != TREE_LIST)
6425 return safe_from_p (x, exp, 0);
6428 else if (TREE_CODE (exp) == CONSTRUCTOR)
6430 constructor_elt *ce;
6431 unsigned HOST_WIDE_INT idx;
6434 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6436 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6437 || !safe_from_p (x, ce->value, 0))
6441 else if (TREE_CODE (exp) == ERROR_MARK)
6442 return 1; /* An already-visited SAVE_EXPR? */
6447 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
6449 return (TREE_CODE (exp) != DECL_EXPR
6450 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6451 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6452 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6455 case tcc_comparison:
6456 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6461 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6463 case tcc_expression:
6466 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6467 the expression. If it is set, we conflict iff we are that rtx or
6468 both are in memory. Otherwise, we check all operands of the
6469 expression recursively. */
6471 switch (TREE_CODE (exp))
6474 /* If the operand is static or we are static, we can't conflict.
6475 Likewise if we don't conflict with the operand at all. */
6476 if (staticp (TREE_OPERAND (exp, 0))
6477 || TREE_STATIC (exp)
6478 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6481 /* Otherwise, the only way this can conflict is if we are taking
6482 the address of a DECL whose address is part of X, which is very rare. */
6484 exp = TREE_OPERAND (exp, 0);
6487 if (!DECL_RTL_SET_P (exp)
6488 || !MEM_P (DECL_RTL (exp)))
6491 exp_rtl = XEXP (DECL_RTL (exp), 0);
6495 case MISALIGNED_INDIRECT_REF:
6496 case ALIGN_INDIRECT_REF:
6499 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6500 get_alias_set (exp)))
6505 /* Assume that the call will clobber all hard registers and all of memory. */
6507 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6512 case WITH_CLEANUP_EXPR:
6513 case CLEANUP_POINT_EXPR:
6514 /* Lowered by gimplify.c. */
6518 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6524 /* If we have an rtx, we do not need to scan our operands. */
6528 nops = TREE_OPERAND_LENGTH (exp);
6529 for (i = 0; i < nops; i++)
6530 if (TREE_OPERAND (exp, i) != 0
6531 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6537 /* Should never get a type here. */
6541 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
6545 if (GET_CODE (exp_rtl) == SUBREG)
6547 exp_rtl = SUBREG_REG (exp_rtl);
6549 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6553 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6554 are memory and they conflict. */
6555 return ! (rtx_equal_p (x, exp_rtl)
6556 || (MEM_P (x) && MEM_P (exp_rtl)
6557 && true_dependence (exp_rtl, VOIDmode, x,
6558 rtx_addr_varies_p)));
6561 /* If we reach here, it is safe. */
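/* Illustrative note, not in the original sources: if X is the MEM
   holding variable "a", then safe_from_p (X, <a + 1>, 0) is 0 because
   evaluating the expression reads the object being modified, while
   safe_from_p (X, <b + 1>, 0) is 1 when "b" lives elsewhere. A false
   0 merely forces a temporary, so it only costs performance. */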
6566 /* Return the highest power of two that EXP is known to be a multiple of.
6567 This is used in updating alignment of MEMs in array references. */
6569 unsigned HOST_WIDE_INT
6570 highest_pow2_factor (const_tree exp)
6572 unsigned HOST_WIDE_INT c0, c1;
6574 switch (TREE_CODE (exp))
6577 /* We can find the lowest bit that's a one. If the low
6578 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6579 We need to handle this case since we can find it in a COND_EXPR,
6580 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6581 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE. */
6583 if (TREE_OVERFLOW (exp))
6584 return BIGGEST_ALIGNMENT;
6587 /* Note: tree_low_cst is intentionally not used here;
6588 we don't care about the upper bits. */
6589 c0 = TREE_INT_CST_LOW (exp);
6591 return c0 ? c0 : BIGGEST_ALIGNMENT;
6595 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6596 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6597 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6598 return MIN (c0, c1);
6601 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6602 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6605 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6607 if (integer_pow2p (TREE_OPERAND (exp, 1))
6608 && host_integerp (TREE_OPERAND (exp, 1), 1))
6610 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6611 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6612 return MAX (1, c0 / c1);
6617 /* The highest power of two of a bit-and expression is the maximum of
6618 that of its operands. We typically get here for a complex LHS and
6619 a constant negative power of two on the RHS to force an explicit
6620 alignment, so don't bother looking at the LHS. */
6621 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6625 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6628 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6631 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6632 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6633 return MIN (c0, c1);
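/* Worked example, not in the original sources: for the expression
   "i * 16 + 32", the MULT_EXPR case gives 1 * 16 = 16 (an opaque
   operand such as "i" contributes the default factor 1), the
   INTEGER_CST 32 gives 32, and the PLUS_EXPR case returns
   MIN (16, 32) = 16, so the sum is known to be a multiple of 16. */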
6642 /* Similar, except that the alignment requirements of TARGET are
6643 taken into account. Assume it is at least as aligned as its
6644 type, unless it is a COMPONENT_REF in which case the layout of
6645 the structure gives the alignment. */
6647 static unsigned HOST_WIDE_INT
6648 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6650 unsigned HOST_WIDE_INT target_align, factor;
6652 factor = highest_pow2_factor (exp);
6653 if (TREE_CODE (target) == COMPONENT_REF)
6654 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6656 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6657 return MAX (factor, target_align);
6660 /* Return the &VAR expression for emulated thread-local VAR. */
6663 emutls_var_address (tree var)
6665 tree emuvar = emutls_decl (var);
6666 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6667 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6668 tree arglist = build_tree_list (NULL_TREE, arg);
6669 tree call = build_function_call_expr (fn, arglist);
6670 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
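/* Illustrative note, not in the original sources: for "__thread int v;"
   on a target without native TLS support, "&v" becomes the equivalent
   of __emutls_get_address (&control), where "control" stands for the
   backing descriptor created by emutls_decl, and the result is cast
   back to "int *" by the fold_convert above. */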
6674 /* Subroutine of expand_expr. Expand the two operands of a binary
6675 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6676 The value may be stored in TARGET if TARGET is nonzero. The
6677 MODIFIER argument is as documented by expand_expr. */
6680 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6681 enum expand_modifier modifier)
6683 if (! safe_from_p (target, exp1, 1))
6685 if (operand_equal_p (exp0, exp1, 0))
6687 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6688 *op1 = copy_rtx (*op0);
6692 /* If we need to preserve evaluation order, copy exp0 into its own
6693 temporary variable so that it can't be clobbered by exp1. */
6694 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6695 exp0 = save_expr (exp0);
6696 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6697 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
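/* Hedged usage sketch, not in the original file (the function name is
   hypothetical): a typical caller expanding both operands of a binary
   tree node in one go. */
#if 0
static rtx
example_expand_plus (tree exp, rtx target, rtx subtarget)
{
  rtx op0, op1;

  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, EXPAND_NORMAL);
  /* Identical operands come back as one rtx plus a copy_rtx of it. */
  return expand_simple_binop (TYPE_MODE (TREE_TYPE (exp)), PLUS,
			      op0, op1, target, 0, OPTAB_LIB_WIDEN);
}
#endif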
6702 /* Return a MEM that contains constant EXP. DEFER is as for
6703 output_constant_def and MODIFIER is as for expand_expr. */
6706 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6710 mem = output_constant_def (exp, defer);
6711 if (modifier != EXPAND_INITIALIZER)
6712 mem = use_anchored_address (mem);
6716 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6717 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6720 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6721 enum expand_modifier modifier)
6723 rtx result, subtarget;
6725 HOST_WIDE_INT bitsize, bitpos;
6726 int volatilep, unsignedp;
6727 enum machine_mode mode1;
6729 /* If we are taking the address of a constant and are at the top level,
6730 we have to use output_constant_def since we can't call force_const_mem yet. */
6732 /* ??? This should be considered a front-end bug. We should not be
6733 generating ADDR_EXPR of something that isn't an LVALUE. The only
6734 exception here is STRING_CST. */
6735 if (CONSTANT_CLASS_P (exp))
6736 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6738 /* Everything must be something allowed by is_gimple_addressable. */
6739 switch (TREE_CODE (exp))
6742 /* This case will happen via recursion for &a->b. */
6743 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6746 /* Recurse and make the output_constant_def clause above handle this. */
6747 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6751 /* The real part of the complex number is always first, therefore
6752 the address is the same as the address of the parent object. */
6755 inner = TREE_OPERAND (exp, 0);
6759 /* The imaginary part of the complex number is always second.
6760 The expression is therefore always offset by the size of the scalar type. */
6763 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6764 inner = TREE_OPERAND (exp, 0);
6768 /* TLS emulation hook - replace __thread VAR's &VAR with
6769 __emutls_get_address (&_emutls.VAR). */
6770 if (! targetm.have_tls
6771 && TREE_CODE (exp) == VAR_DECL
6772 && DECL_THREAD_LOCAL_P (exp))
6774 exp = emutls_var_address (exp);
6775 return expand_expr (exp, target, tmode, modifier);
6780 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6781 expand_expr, as that can have various side effects; LABEL_DECLs, for
6782 example, may not have their DECL_RTL set yet. Expand the rtl of
6783 CONSTRUCTORs too, which should yield a memory reference for the
6784 constructor's contents. Assume language specific tree nodes can
6785 be expanded in some interesting way. */
6786 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6788 || TREE_CODE (exp) == CONSTRUCTOR
6789 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6791 result = expand_expr (exp, target, tmode,
6792 modifier == EXPAND_INITIALIZER
6793 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6795 /* If the DECL isn't in memory, then the DECL wasn't properly
6796 marked TREE_ADDRESSABLE, which will be either a front-end
6797 or a tree optimizer bug. */
6798 gcc_assert (MEM_P (result));
6799 result = XEXP (result, 0);
6801 /* ??? Is this needed anymore? */
6802 if (DECL_P (exp) && ! TREE_USED (exp))
6804 assemble_external (exp);
6805 TREE_USED (exp) = 1;
6808 if (modifier != EXPAND_INITIALIZER
6809 && modifier != EXPAND_CONST_ADDRESS)
6810 result = force_operand (result, target);
6814 /* Pass FALSE as the last argument to get_inner_reference although
6815 we are expanding to RTL. The rationale is that we know how to
6816 handle "aligning nodes" here: we can just bypass them because
6817 they won't change the final object whose address will be returned
6818 (they actually exist only for that purpose). */
6819 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6820 &mode1, &unsignedp, &volatilep, false);
6824 /* We must have made progress. */
6825 gcc_assert (inner != exp);
6827 subtarget = offset || bitpos ? NULL_RTX : target;
6828 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6829 the inner alignment, force the inner to be sufficiently aligned. */
6830 if (CONSTANT_CLASS_P (inner)
6831 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6833 inner = copy_node (inner);
6834 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6835 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6836 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6838 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6844 if (modifier != EXPAND_NORMAL)
6845 result = force_operand (result, NULL);
6846 tmp = expand_expr (offset, NULL_RTX, tmode,
6847 modifier == EXPAND_INITIALIZER
6848 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6850 result = convert_memory_address (tmode, result);
6851 tmp = convert_memory_address (tmode, tmp);
6853 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6854 result = gen_rtx_PLUS (tmode, result, tmp);
6857 subtarget = bitpos ? NULL_RTX : target;
6858 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6859 1, OPTAB_LIB_WIDEN);
6865 /* Someone beforehand should have rejected taking the address
6866 of such an object. */
6867 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6869 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6870 if (modifier < EXPAND_SUM)
6871 result = force_operand (result, target);
6877 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6878 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6881 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6882 enum expand_modifier modifier)
6884 enum machine_mode rmode;
6887 /* Target mode of VOIDmode says "whatever's natural". */
6888 if (tmode == VOIDmode)
6889 tmode = TYPE_MODE (TREE_TYPE (exp));
6891 /* We can get called with some Weird Things if the user does silliness
6892 like "(short) &a". In that case, convert_memory_address won't do
6893 the right thing, so ignore the given target mode. */
6894 if (tmode != Pmode && tmode != ptr_mode)
6897 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6900 /* Despite expand_expr's claims concerning ignoring TMODE when not
6901 strictly convenient, stuff breaks if we don't honor it. Note
6902 that combined with the above, we only do this for pointer modes. */
6903 rmode = GET_MODE (result);
6904 if (rmode == VOIDmode)
6907 result = convert_memory_address (tmode, result);
6912 /* Generate code for computing CONSTRUCTOR EXP.
6913 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6914 is TRUE, instead of creating a temporary variable in memory,
6915 NULL is returned and the caller needs to handle it differently. */
6918 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6919 bool avoid_temp_mem)
6921 tree type = TREE_TYPE (exp);
6922 enum machine_mode mode = TYPE_MODE (type);
6924 /* Try to avoid creating a temporary at all. This is possible
6925 if all of the initializer is zero.
6926 FIXME: try to handle all [0..255] initializers we can handle with memset. */
6928 if (TREE_STATIC (exp)
6929 && !TREE_ADDRESSABLE (exp)
6930 && target != 0 && mode == BLKmode
6931 && all_zeros_p (exp))
6933 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6937 /* All elts simple constants => refer to a constant in memory. But
6938 if this is a non-BLKmode mode, let it store a field at a time
6939 since that should make a CONST_INT or CONST_DOUBLE when we
6940 fold. Likewise, if we have a target we can use, it is best to
6941 store directly into the target unless the type is large enough
6942 that memcpy will be used. If we are making an initializer and
6943 all operands are constant, put it in memory as well.
6945 FIXME: Avoid trying to fill vector constructors piece-meal.
6946 Output them with output_constant_def below unless we're sure
6947 they're zeros. This should go away when vector initializers
6948 are treated like VECTOR_CST instead of arrays. */
6949 if ((TREE_STATIC (exp)
6950 && ((mode == BLKmode
6951 && ! (target != 0 && safe_from_p (target, exp, 1)))
6952 || TREE_ADDRESSABLE (exp)
6953 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6954 && (! MOVE_BY_PIECES_P
6955 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6957 && ! mostly_zeros_p (exp))))
6958 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6959 && TREE_CONSTANT (exp)))
6966 constructor = expand_expr_constant (exp, 1, modifier);
6968 if (modifier != EXPAND_CONST_ADDRESS
6969 && modifier != EXPAND_INITIALIZER
6970 && modifier != EXPAND_SUM)
6971 constructor = validize_mem (constructor);
6976 /* Handle calls that pass values in multiple non-contiguous
6977 locations. The Irix 6 ABI has examples of this. */
6978 if (target == 0 || ! safe_from_p (target, exp, 1)
6979 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6985 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6986 | (TREE_READONLY (exp)
6987 * TYPE_QUAL_CONST))),
6988 0, TREE_ADDRESSABLE (exp), 1);
6991 store_constructor (exp, target, 0, int_expr_size (exp));
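/* Illustrative note, not in the original sources: a local aggregate
   initialization such as "struct S s = { 0 };" has an all-zero,
   TREE_STATIC constructor, so with a BLKmode target it takes the
   all_zeros_p fast path above and becomes a single clear_storage
   call instead of a field-by-field store_constructor walk. */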
6995 /* Return the defining gimple statement for SSA_NAME NAME if it is an
6996 assignment and the code of the expression on the RHS is CODE. Return NULL otherwise. */
7000 get_def_for_expr (tree name, enum tree_code code)
7004 if (TREE_CODE (name) != SSA_NAME)
7007 def_stmt = get_gimple_for_ssa_name (name);
7009 || gimple_assign_rhs_code (def_stmt) != code)
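/* Hedged usage sketch, not in the original file: checking whether an
   SSA operand was defined by a multiplication, e.g. to fuse it into a
   wider multiply-add. The variable names are hypothetical. */
#if 0
  gimple def = get_def_for_expr (treeop0, MULT_EXPR);
  if (def)
    {
      tree mul_arg0 = gimple_assign_rhs1 (def);
      tree mul_arg1 = gimple_assign_rhs2 (def);
      /* ... expand using mul_arg0/mul_arg1 directly ... */
    }
#endif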
7016 /* expand_expr: generate code for computing expression EXP.
7017 An rtx for the computed value is returned. The value is never null.
7018 In the case of a void EXP, const0_rtx is returned.
7020 The value may be stored in TARGET if TARGET is nonzero.
7021 TARGET is just a suggestion; callers must assume that
7022 the rtx returned may not be the same as TARGET.
7024 If TARGET is CONST0_RTX, it means that the value will be ignored.
7026 If TMODE is not VOIDmode, it suggests generating the
7027 result in mode TMODE. But this is done only when convenient.
7028 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7029 TMODE is just a suggestion; callers must assume that
7030 the rtx returned may not have mode TMODE.
7032 Note that TARGET may have neither TMODE nor MODE. In that case, it
7033 probably will not be used.
7035 If MODIFIER is EXPAND_SUM then when EXP is an addition
7036 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7037 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7038 products as above, or REG or MEM, or constant.
7039 Ordinarily in such cases we would output mul or add instructions
7040 and then return a pseudo reg containing the sum.
7042 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7043 it also marks a label as absolutely required (it can't be dead).
7044 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7045 This is used for outputting expressions used in initializers.
7047 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7048 with a constant address even if that address is not normally legitimate.
7049 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7051 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7052 a call parameter. Such targets require special care as we haven't yet
7053 marked TARGET so that it's safe from being trashed by libcalls. We
7054 don't want to use TARGET for anything but the final result;
7055 intermediate values must go elsewhere. Additionally, calls to
7056 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7058 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7059 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7060 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7061 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7064 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7065 enum expand_modifier, rtx *);
7068 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7069 enum expand_modifier modifier, rtx *alt_rtl)
7072 rtx ret, last = NULL;
7074 /* Handle ERROR_MARK before anybody tries to access its type. */
7075 if (TREE_CODE (exp) == ERROR_MARK
7076 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7078 ret = CONST0_RTX (tmode);
7079 return ret ? ret : const0_rtx;
7082 if (flag_non_call_exceptions)
7084 rn = lookup_expr_eh_region (exp);
7086 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7088 last = get_last_insn ();
7091 /* If this is an expression of some kind and it has an associated line
7092 number, then emit the line number before expanding the expression.
7094 We need to save and restore the file and line information so that
7095 errors discovered during expansion are emitted with the right
7096 information. It would be better if the diagnostic routines
7097 used the file/line information embedded in the tree nodes rather than globals. */
7099 if (cfun && EXPR_HAS_LOCATION (exp))
7101 location_t saved_location = input_location;
7102 input_location = EXPR_LOCATION (exp);
7103 set_curr_insn_source_location (input_location);
7105 /* Record where the insns produced belong. */
7106 set_curr_insn_block (TREE_BLOCK (exp));
7108 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7110 input_location = saved_location;
7114 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7117 /* If using non-call exceptions, mark all insns that may trap.
7118 expand_call() will mark CALL_INSNs before we get to this code,
7119 but it doesn't handle libcalls, and these may trap. */
7123 for (insn = next_real_insn (last); insn;
7124 insn = next_real_insn (insn))
7126 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7127 /* If we want exceptions for non-call insns, any
7128 may_trap_p instruction may throw. */
7129 && GET_CODE (PATTERN (insn)) != CLOBBER
7130 && GET_CODE (PATTERN (insn)) != USE
7131 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7132 add_reg_note (insn, REG_EH_REGION, GEN_INT (rn));
7140 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7141 enum expand_modifier modifier, rtx *alt_rtl)
7143 rtx op0, op1, op2, temp, decl_rtl;
7146 enum machine_mode mode;
7147 enum tree_code code = TREE_CODE (exp);
7149 rtx subtarget, original_target;
7151 tree context, subexp0, subexp1;
7152 bool reduce_bit_field;
7153 gimple subexp0_def, subexp1_def;
7155 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7156 ? reduce_to_bit_field_precision ((expr), \
7161 type = TREE_TYPE (exp);
7162 mode = TYPE_MODE (type);
7163 unsignedp = TYPE_UNSIGNED (type);
7165 ignore = (target == const0_rtx
7166 || ((CONVERT_EXPR_CODE_P (code)
7167 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7168 && TREE_CODE (type) == VOID_TYPE));
7170 /* An operation in what may be a bit-field type needs the
7171 result to be reduced to the precision of the bit-field type,
7172 which is narrower than that of the type's mode. */
7173 reduce_bit_field = (!ignore
7174 && TREE_CODE (type) == INTEGER_TYPE
7175 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
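/* Illustrative note, not in the original sources: for a bit-field
   type such as "unsigned int f : 3" the arithmetic is carried out in
   the containing mode (say SImode), so REDUCE_BIT_FIELD masks results
   back to 3 bits; without it, 7 + 1 would yield 8 instead of wrapping
   to 0 as the 3-bit type requires. */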
7177 /* If we are going to ignore this result, we need only do something
7178 if there is a side-effect somewhere in the expression. If there
7179 is, short-circuit the most common cases here. Note that we must
7180 not call expand_expr with anything but const0_rtx in case this
7181 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7185 if (! TREE_SIDE_EFFECTS (exp))
7188 /* Ensure we reference a volatile object even if the value is ignored, but
7189 don't do this if all we are doing is taking its address. */
7190 if (TREE_THIS_VOLATILE (exp)
7191 && TREE_CODE (exp) != FUNCTION_DECL
7192 && mode != VOIDmode && mode != BLKmode
7193 && modifier != EXPAND_CONST_ADDRESS)
7195 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7197 temp = copy_to_reg (temp);
7201 if (TREE_CODE_CLASS (code) == tcc_unary
7202 || code == COMPONENT_REF || code == INDIRECT_REF)
7203 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7206 else if (TREE_CODE_CLASS (code) == tcc_binary
7207 || TREE_CODE_CLASS (code) == tcc_comparison
7208 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7210 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7211 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7214 else if (code == BIT_FIELD_REF)
7216 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7217 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7218 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7225 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7228 /* Use subtarget as the target for operand 0 of a binary operation. */
7229 subtarget = get_subtarget (target);
7230 original_target = target;
7236 tree function = decl_function_context (exp);
7238 temp = label_rtx (exp);
7239 temp = gen_rtx_LABEL_REF (Pmode, temp);
7241 if (function != current_function_decl
7243 LABEL_REF_NONLOCAL_P (temp) = 1;
7245 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7250 /* ??? ivopts calls the expander without any preparation from
7251 out-of-ssa. So fake instructions as if this were an access to the
7252 base variable. This unnecessarily allocates a pseudo; see whether we
7253 can reuse it if partition base vars have it set already. */
7254 if (!currently_expanding_to_rtl)
7255 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL);
7257 gimple g = get_gimple_for_ssa_name (exp);
7259 return expand_expr_real_1 (gimple_assign_rhs_to_tree (g), target,
7260 tmode, modifier, NULL);
7262 decl_rtl = get_rtx_for_ssa_name (exp);
7263 exp = SSA_NAME_VAR (exp);
7264 goto expand_decl_rtl;
7268 /* If a static var's type was incomplete when the decl was written,
7269 but the type is complete now, lay out the decl now. */
7270 if (DECL_SIZE (exp) == 0
7271 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7272 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7273 layout_decl (exp, 0);
7275 /* TLS emulation hook - replace __thread vars with
7276 *__emutls_get_address (&_emutls.var). */
7277 if (! targetm.have_tls
7278 && TREE_CODE (exp) == VAR_DECL
7279 && DECL_THREAD_LOCAL_P (exp))
7281 exp = build_fold_indirect_ref (emutls_var_address (exp));
7282 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7285 /* ... fall through ... */
7289 decl_rtl = DECL_RTL (exp);
7291 gcc_assert (decl_rtl);
7292 decl_rtl = copy_rtx (decl_rtl);
7294 /* Ensure variable marked as used even if it doesn't go through
7295 a parser. If it hasn't been used yet, write out an external definition. */
7297 if (! TREE_USED (exp))
7299 assemble_external (exp);
7300 TREE_USED (exp) = 1;
7303 /* Show we haven't gotten RTL for this yet. */
7306 /* Variables inherited from containing functions should have
7307 been lowered by this point. */
7308 context = decl_function_context (exp);
7309 gcc_assert (!context
7310 || context == current_function_decl
7311 || TREE_STATIC (exp)
7312 /* ??? C++ creates functions that are not TREE_STATIC. */
7313 || TREE_CODE (exp) == FUNCTION_DECL);
7315 /* This is the case of an array whose size is to be determined
7316 from its initializer, while the initializer is still being parsed.
7319 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7320 temp = validize_mem (decl_rtl);
7322 /* If DECL_RTL is memory, we are in the normal case; if the
7323 address is not valid, get the address into a register. */
7325 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7328 *alt_rtl = decl_rtl;
7329 decl_rtl = use_anchored_address (decl_rtl);
7330 if (modifier != EXPAND_CONST_ADDRESS
7331 && modifier != EXPAND_SUM
7332 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
7333 temp = replace_equiv_address (decl_rtl,
7334 copy_rtx (XEXP (decl_rtl, 0)));
7337 /* If we got something, return it. But first, set the alignment
7338 if the address is a register. */
7341 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7342 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7347 /* If the mode of DECL_RTL does not match that of the decl, it
7348 must be a promoted value. We return a SUBREG of the wanted mode,
7349 but mark it so that we know that it was already extended. */
7351 if (REG_P (decl_rtl)
7352 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7354 enum machine_mode pmode;
7356 /* Get the signedness used for this variable. Ensure we get the
7357 same mode we got when the variable was declared. */
7358 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7359 (TREE_CODE (exp) == RESULT_DECL
7360 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7361 gcc_assert (GET_MODE (decl_rtl) == pmode);
7363 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7364 SUBREG_PROMOTED_VAR_P (temp) = 1;
7365 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7372 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7373 TREE_INT_CST_HIGH (exp), mode);
7379 tree tmp = NULL_TREE;
7380 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7381 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7382 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7383 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7384 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7385 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7386 return const_vector_from_tree (exp);
7387 if (GET_MODE_CLASS (mode) == MODE_INT)
7389 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7391 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7394 tmp = build_constructor_from_list (type,
7395 TREE_VECTOR_CST_ELTS (exp));
7396 return expand_expr (tmp, ignore ? const0_rtx : target,
7401 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7404 /* If optimized, generate immediate CONST_DOUBLE
7405 which will be turned into memory by reload if necessary.
7407 We used to force a register so that loop.c could see it. But
7408 this does not allow gen_* patterns to perform optimizations with
7409 the constants. It also produces two insns in cases like "x = 1.0;".
7410 On most machines, floating-point constants are not permitted in
7411 many insns, so we'd end up copying it to a register in any case.
7413 Now, we do the copying in expand_binop, if appropriate. */
7414 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7415 TYPE_MODE (TREE_TYPE (exp)));
7418 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7419 TYPE_MODE (TREE_TYPE (exp)));
7422 /* Handle evaluating a complex constant in a CONCAT target. */
7423 if (original_target && GET_CODE (original_target) == CONCAT)
7425 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7428 rtarg = XEXP (original_target, 0);
7429 itarg = XEXP (original_target, 1);
7431 /* Move the real and imaginary parts separately. */
7432 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7433 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7436 emit_move_insn (rtarg, op0);
7438 emit_move_insn (itarg, op1);
7440 return original_target;
7443 /* ... fall through ... */
7446 temp = expand_expr_constant (exp, 1, modifier);
7448 /* temp contains a constant address.
7449 On RISC machines where a constant address isn't valid,
7450 make some insns to get that address into a register. */
7451 if (modifier != EXPAND_CONST_ADDRESS
7452 && modifier != EXPAND_INITIALIZER
7453 && modifier != EXPAND_SUM
7454 && ! memory_address_p (mode, XEXP (temp, 0)))
7455 return replace_equiv_address (temp,
7456 copy_rtx (XEXP (temp, 0)));
7461 tree val = TREE_OPERAND (exp, 0);
7462 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7464 if (!SAVE_EXPR_RESOLVED_P (exp))
7466 /* We can indeed still hit this case, typically via builtin
7467 expanders calling save_expr immediately before expanding
7468 something. Assume this means that we only have to deal
7469 with non-BLKmode values. */
7470 gcc_assert (GET_MODE (ret) != BLKmode);
7472 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7473 DECL_ARTIFICIAL (val) = 1;
7474 DECL_IGNORED_P (val) = 1;
7475 TREE_OPERAND (exp, 0) = val;
7476 SAVE_EXPR_RESOLVED_P (exp) = 1;
7478 if (!CONSTANT_P (ret))
7479 ret = copy_to_reg (ret);
7480 SET_DECL_RTL (val, ret);
7487 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7488 expand_goto (TREE_OPERAND (exp, 0));
7490 expand_computed_goto (TREE_OPERAND (exp, 0));
7494 /* If we don't need the result, just ensure we evaluate any subexpressions. */
7498 unsigned HOST_WIDE_INT idx;
7501 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7502 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7507 return expand_constructor (exp, target, modifier, false);
7509 case MISALIGNED_INDIRECT_REF:
7510 case ALIGN_INDIRECT_REF:
7513 tree exp1 = TREE_OPERAND (exp, 0);
7515 if (modifier != EXPAND_WRITE)
7519 t = fold_read_from_constant_string (exp);
7521 return expand_expr (t, target, tmode, modifier);
7524 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7525 op0 = memory_address (mode, op0);
7527 if (code == ALIGN_INDIRECT_REF)
7529 int align = TYPE_ALIGN_UNIT (type);
7530 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7531 op0 = memory_address (mode, op0);
7534 temp = gen_rtx_MEM (mode, op0);
7536 set_mem_attributes (temp, exp, 0);
7538 /* Resolve the misalignment now, so that we don't have to remember
7539 to resolve it later. Of course, this only works for reads. */
7540 /* ??? When we get around to supporting writes, we'll have to handle
7541 this in store_expr directly. The vectorizer isn't generating
7542 those yet, however. */
7543 if (code == MISALIGNED_INDIRECT_REF)
7548 gcc_assert (modifier == EXPAND_NORMAL
7549 || modifier == EXPAND_STACK_PARM);
7551 /* The vectorizer should have already checked the mode. */
7552 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7553 gcc_assert (icode != CODE_FOR_nothing);
7555 /* We've already validated the memory, and we're creating a
7556 new pseudo destination. The predicates really can't fail. */
7557 reg = gen_reg_rtx (mode);
7559 /* Nor can the insn generator. */
7560 insn = GEN_FCN (icode) (reg, temp);
7569 case TARGET_MEM_REF:
7571 struct mem_address addr;
7573 get_address_description (exp, &addr);
7574 op0 = addr_for_mem_ref (&addr, true);
7575 op0 = memory_address (mode, op0);
7576 temp = gen_rtx_MEM (mode, op0);
7577 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7584 tree array = TREE_OPERAND (exp, 0);
7585 tree index = TREE_OPERAND (exp, 1);
7587 /* Fold an expression like: "foo"[2].
7588 This is not done in fold so it won't happen inside &.
7589 Don't fold if this is for wide characters since it's too
7590 difficult to do correctly and this is a very rare case. */
7592 if (modifier != EXPAND_CONST_ADDRESS
7593 && modifier != EXPAND_INITIALIZER
7594 && modifier != EXPAND_MEMORY)
7596 tree t = fold_read_from_constant_string (exp);
7599 return expand_expr (t, target, tmode, modifier);
7602 /* If this is a constant index into a constant array,
7603 just get the value from the array. Handle both the case where
7604 we have an explicit constructor and the case where our operand is a variable
7605 that was declared const. */
7607 if (modifier != EXPAND_CONST_ADDRESS
7608 && modifier != EXPAND_INITIALIZER
7609 && modifier != EXPAND_MEMORY
7610 && TREE_CODE (array) == CONSTRUCTOR
7611 && ! TREE_SIDE_EFFECTS (array)
7612 && TREE_CODE (index) == INTEGER_CST)
7614 unsigned HOST_WIDE_INT ix;
7617 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7619 if (tree_int_cst_equal (field, index))
7621 if (!TREE_SIDE_EFFECTS (value))
7622 return expand_expr (fold (value), target, tmode, modifier);
7627 else if (optimize >= 1
7628 && modifier != EXPAND_CONST_ADDRESS
7629 && modifier != EXPAND_INITIALIZER
7630 && modifier != EXPAND_MEMORY
7631 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7632 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7633 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7634 && targetm.binds_local_p (array))
7636 if (TREE_CODE (index) == INTEGER_CST)
7638 tree init = DECL_INITIAL (array);
7640 if (TREE_CODE (init) == CONSTRUCTOR)
7642 unsigned HOST_WIDE_INT ix;
7645 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7647 if (tree_int_cst_equal (field, index))
7649 if (TREE_SIDE_EFFECTS (value))
7652 if (TREE_CODE (value) == CONSTRUCTOR)
7654 /* If VALUE is a CONSTRUCTOR, this
7655 optimization is only useful if
7656 this doesn't store the CONSTRUCTOR
7657 into memory. If it does, it is more
7658 efficient to just load the data from
7659 the array directly. */
7660 rtx ret = expand_constructor (value, target,
7662 if (ret == NULL_RTX)
7666 return expand_expr (fold (value), target, tmode,
7670 else if (TREE_CODE (init) == STRING_CST)
7672 tree index1 = index;
7673 tree low_bound = array_ref_low_bound (exp);
7674 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7676 /* Optimize the special case of a zero lower bound.
7678 We convert the low_bound to sizetype to avoid some problems
7679 with constant folding. (E.g. suppose the lower bound is 1,
7680 and its mode is QI. Without the conversion, (ARRAY
7681 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7682 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7684 if (! integer_zerop (low_bound))
7685 index1 = size_diffop (index1, fold_convert (sizetype,
7688 if (0 > compare_tree_int (index1,
7689 TREE_STRING_LENGTH (init)))
7691 tree type = TREE_TYPE (TREE_TYPE (init));
7692 enum machine_mode mode = TYPE_MODE (type);
7694 if (GET_MODE_CLASS (mode) == MODE_INT
7695 && GET_MODE_SIZE (mode) == 1)
7696 return gen_int_mode (TREE_STRING_POINTER (init)
7697 [TREE_INT_CST_LOW (index1)],
7704 goto normal_inner_ref;
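/* Illustrative note for the STRING_CST branch above, not in the
   original sources: given "static const char s[] = "foo";", the
   read "s[2]" is folded at expand time to the constant 'o' via
   gen_int_mode, provided the index is below TREE_STRING_LENGTH. */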
7707 /* If the operand is a CONSTRUCTOR, we can just extract the
7708 appropriate field if it is present. */
7709 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7711 unsigned HOST_WIDE_INT idx;
7714 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7716 if (field == TREE_OPERAND (exp, 1)
7717 /* We can normally use the value of the field in the
7718 CONSTRUCTOR. However, if this is a bitfield in
7719 an integral mode that we can fit in a HOST_WIDE_INT,
7720 we must mask only the number of bits in the bitfield,
7721 since this is done implicitly by the constructor. If
7722 the bitfield does not meet either of those conditions,
7723 we can't do this optimization. */
7724 && (! DECL_BIT_FIELD (field)
7725 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7726 && (GET_MODE_BITSIZE (DECL_MODE (field))
7727 <= HOST_BITS_PER_WIDE_INT))))
7729 if (DECL_BIT_FIELD (field)
7730 && modifier == EXPAND_STACK_PARM)
7732 op0 = expand_expr (value, target, tmode, modifier);
7733 if (DECL_BIT_FIELD (field))
7735 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7736 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7738 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7740 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7741 op0 = expand_and (imode, op0, op1, target);
7746 = build_int_cst (NULL_TREE,
7747 GET_MODE_BITSIZE (imode) - bitsize);
7749 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7751 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7759 goto normal_inner_ref;
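/* Worked example for the bit-field masking above, not in the original
   sources: extracting a 3-bit unsigned field whose CONSTRUCTOR value
   expanded to the SImode constant 13 (binary 1101) applies the mask
   (1 << 3) - 1 = 7 and yields 5 (binary 101); a signed field instead
   uses the left/right shift pair so the sign bit is propagated. */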
7762 case ARRAY_RANGE_REF:
7765 enum machine_mode mode1, mode2;
7766 HOST_WIDE_INT bitsize, bitpos;
7768 int volatilep = 0, must_force_mem;
7769 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7770 &mode1, &unsignedp, &volatilep, true);
7771 rtx orig_op0, memloc;
7773 /* If we got back the original object, something is wrong. Perhaps
7774 we are evaluating an expression too early. In any event, don't
7775 infinitely recurse. */
7776 gcc_assert (tem != exp);
7778 /* If TEM's type is a union of variable size, pass TARGET to the inner
7779 computation, since it will need a temporary and TARGET is known
7780 to suffice. This occurs in unchecked conversion in Ada. */
7783 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7784 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7786 && modifier != EXPAND_STACK_PARM
7787 ? target : NULL_RTX),
7789 (modifier == EXPAND_INITIALIZER
7790 || modifier == EXPAND_CONST_ADDRESS
7791 || modifier == EXPAND_STACK_PARM)
7792 ? modifier : EXPAND_NORMAL);
7795 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
7797 /* If we have either an offset, a BLKmode result, or a reference
7798 outside the underlying object, we must force it to memory.
7799 Such a case can occur in Ada if we have unchecked conversion
7800 of an expression from a scalar type to an aggregate type or
7801 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
7802 passed a partially uninitialized object or a view-conversion
7803 to a larger size. */
7804 must_force_mem = (offset
7806 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
7808 /* If this is a constant, put it in a register if it is a legitimate
7809 constant and we don't need a memory reference. */
7810 if (CONSTANT_P (op0)
7812 && LEGITIMATE_CONSTANT_P (op0)
7814 op0 = force_reg (mode2, op0);
7816 /* Otherwise, if this is a constant, try to force it to the constant
7817 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
7818 is a legitimate constant. */
7819 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
7820 op0 = validize_mem (memloc);
7822 /* Otherwise, if this is a constant or the object is not in memory
7823 and needs to be, put it there. */
7824 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
7826 tree nt = build_qualified_type (TREE_TYPE (tem),
7827 (TYPE_QUALS (TREE_TYPE (tem))
7828 | TYPE_QUAL_CONST));
7829 memloc = assign_temp (nt, 1, 1, 1);
7830 emit_move_insn (memloc, op0);
7836 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7839 gcc_assert (MEM_P (op0));
7841 #ifdef POINTERS_EXTEND_UNSIGNED
7842 if (GET_MODE (offset_rtx) != Pmode)
7843 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7845 if (GET_MODE (offset_rtx) != ptr_mode)
7846 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7849 if (GET_MODE (op0) == BLKmode
7850 /* A constant address in OP0 can have VOIDmode; we must
7851 not try to call force_reg in that case. */
7852 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7854 && (bitpos % bitsize) == 0
7855 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7856 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7858 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7862 op0 = offset_address (op0, offset_rtx,
7863 highest_pow2_factor (offset));
7866 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7867 record its alignment as BIGGEST_ALIGNMENT. */
7868 if (MEM_P (op0) && bitpos == 0 && offset != 0
7869 && is_aligning_offset (offset, tem))
7870 set_mem_align (op0, BIGGEST_ALIGNMENT);
7872 /* Don't forget about volatility even if this is a bitfield. */
7873 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7875 if (op0 == orig_op0)
7876 op0 = copy_rtx (op0);
7878 MEM_VOLATILE_P (op0) = 1;
7881 /* The following code doesn't handle CONCAT.
7882 Assume only bitpos == 0 can be used for CONCAT, due to
7883 one-element arrays having the same mode as their element. */
7884 if (GET_CODE (op0) == CONCAT)
7886 gcc_assert (bitpos == 0
7887 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7891 /* In cases where an aligned union has an unaligned object
7892 as a field, we might be extracting a BLKmode value from
7893 an integer-mode (e.g., SImode) object. Handle this case
7894 by doing the extract into an object as wide as the field
7895 (which we know to be the width of a basic mode), then
7896 storing into memory, and changing the mode to BLKmode. */
7897 if (mode1 == VOIDmode
7898 || REG_P (op0) || GET_CODE (op0) == SUBREG
7899 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7900 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7901 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7902 && modifier != EXPAND_CONST_ADDRESS
7903 && modifier != EXPAND_INITIALIZER)
7904 /* If the field isn't aligned enough to fetch as a memref,
7905 fetch it as a bit field. */
7906 || (mode1 != BLKmode
7907 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7908 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7910 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7911 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7912 && ((modifier == EXPAND_CONST_ADDRESS
7913 || modifier == EXPAND_INITIALIZER)
7915 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7916 || (bitpos % BITS_PER_UNIT != 0)))
7917 /* If the type and the field are a constant size and the
7918 size of the type isn't the same size as the bitfield,
7919 we must use bitfield operations. */
7921 && TYPE_SIZE (TREE_TYPE (exp))
7922 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7923 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7926 enum machine_mode ext_mode = mode;
7928 if (ext_mode == BLKmode
7929 && ! (target != 0 && MEM_P (op0)
7931 && bitpos % BITS_PER_UNIT == 0))
7932 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7934 if (ext_mode == BLKmode)
7937 target = assign_temp (type, 0, 1, 1);
7942 /* In this case, BITPOS must start at a byte boundary and
7943 TARGET, if specified, must be a MEM. */
7944 gcc_assert (MEM_P (op0)
7945 && (!target || MEM_P (target))
7946 && !(bitpos % BITS_PER_UNIT));
7948 emit_block_move (target,
7949 adjust_address (op0, VOIDmode,
7950 bitpos / BITS_PER_UNIT),
7951 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7953 (modifier == EXPAND_STACK_PARM
7954 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7959 op0 = validize_mem (op0);
7961 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7962 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7964 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7965 (modifier == EXPAND_STACK_PARM
7966 ? NULL_RTX : target),
7967 ext_mode, ext_mode);
7969 /* If the result is a record type and BITSIZE is narrower than
7970 the mode of OP0, an integral mode, and this is a big endian
7971 machine, we must put the field into the high-order bits. */
7972 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7973 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7974 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7975 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7976 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7980 /* If the result type is BLKmode, store the data into a temporary
7981 of the appropriate type, but with the mode corresponding to the
7982 mode for the data we have (op0's mode). It's tempting to make
7983 this a constant type, since we know it's only being stored once,
7984 but that can cause problems if we are taking the address of this
7985 COMPONENT_REF because the MEM of any reference via that address
7986 will have flags corresponding to the type, which will not
7987 necessarily be constant. */
7988 if (mode == BLKmode)
7990 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7993 /* If the reference doesn't use the alias set of its type,
7994 we cannot create the temporary using that type. */
7995 if (component_uses_parent_alias_set (exp))
7997 new_rtx = assign_stack_local (ext_mode, size, 0);
7998 set_mem_alias_set (new_rtx, get_alias_set (exp));
8001 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
8003 emit_move_insn (new_rtx, op0);
8004 op0 = copy_rtx (new_rtx);
8005 PUT_MODE (op0, BLKmode);
8006 set_mem_attributes (op0, exp, 1);
8012 /* If the result is BLKmode, use that to access the object now as well. */
8014 if (mode == BLKmode)
8017 /* Get a reference to just this component. */
8018 if (modifier == EXPAND_CONST_ADDRESS
8019 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8020 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
8022 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
8024 if (op0 == orig_op0)
8025 op0 = copy_rtx (op0);
8027 set_mem_attributes (op0, exp, 0);
8028 if (REG_P (XEXP (op0, 0)))
8029 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8031 MEM_VOLATILE_P (op0) |= volatilep;
8032 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
8033 || modifier == EXPAND_CONST_ADDRESS
8034 || modifier == EXPAND_INITIALIZER)
8036 else if (target == 0)
8037 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8039 convert_move (target, op0, unsignedp);
8044 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8047 /* All valid uses of __builtin_va_arg_pack () are removed during inlining. */
8049 if (CALL_EXPR_VA_ARG_PACK (exp))
8050 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8052 tree fndecl = get_callee_fndecl (exp), attr;
8055 && (attr = lookup_attribute ("error",
8056 DECL_ATTRIBUTES (fndecl))) != NULL)
8057 error ("%Kcall to %qs declared with attribute error: %s",
8058 exp, lang_hooks.decl_printable_name (fndecl, 1),
8059 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8061 && (attr = lookup_attribute ("warning",
8062 DECL_ATTRIBUTES (fndecl))) != NULL)
8063 warning_at (tree_nonartificial_location (exp),
8064 0, "%Kcall to %qs declared with attribute warning: %s",
8065 exp, lang_hooks.decl_printable_name (fndecl, 1),
8066 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8068 /* Check for a built-in function. */
8069 if (fndecl && DECL_BUILT_IN (fndecl))
8071 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
8072 return expand_builtin (exp, target, subtarget, tmode, ignore);
8075 return expand_call (exp, target, ignore);
8079 if (TREE_OPERAND (exp, 0) == error_mark_node)
8082 if (TREE_CODE (type) == UNION_TYPE)
8084 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8086 /* If both input and output are BLKmode, this conversion isn't doing
8087 anything except possibly changing memory attributes. */
8088 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8090 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8093 result = copy_rtx (result);
8094 set_mem_attributes (result, exp, 0);
8100 if (TYPE_MODE (type) != BLKmode)
8101 target = gen_reg_rtx (TYPE_MODE (type));
8103 target = assign_temp (type, 0, 1, 1);
8107 /* Store data into beginning of memory target. */
8108 store_expr (TREE_OPERAND (exp, 0),
8109 adjust_address (target, TYPE_MODE (valtype), 0),
8110 modifier == EXPAND_STACK_PARM,
8115 gcc_assert (REG_P (target));
8117 /* Store this field into a union of the proper type. */
8118 store_field (target,
8119 MIN ((int_size_in_bytes (TREE_TYPE
8120 (TREE_OPERAND (exp, 0)))
8122 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8123 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8127 /* Return the entire union. */
8131 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8133 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8136 /* If the signedness of the conversion differs and OP0 is
8137 a promoted SUBREG, clear that indication since we now
8138 have to do the proper extension. */
8139 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8140 && GET_CODE (op0) == SUBREG)
8141 SUBREG_PROMOTED_VAR_P (op0) = 0;
8143 return REDUCE_BIT_FIELD (op0);
8146 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8147 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8148 if (GET_MODE (op0) == mode)
8151 /* If OP0 is a constant, just convert it into the proper mode. */
8152 else if (CONSTANT_P (op0))
8154 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8155 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8157 if (modifier == EXPAND_INITIALIZER)
8158 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8159 subreg_lowpart_offset (mode,
8162 op0 = convert_modes (mode, inner_mode, op0,
8163 TYPE_UNSIGNED (inner_type));
8166 else if (modifier == EXPAND_INITIALIZER)
8167 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8169 else if (target == 0)
8170 op0 = convert_to_mode (mode, op0,
8171 TYPE_UNSIGNED (TREE_TYPE
8172 (TREE_OPERAND (exp, 0))));
8175 convert_move (target, op0,
8176 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8180 return REDUCE_BIT_FIELD (op0);
8182 case VIEW_CONVERT_EXPR:
8185 /* If we are converting to BLKmode, try to avoid an intermediate
8186 temporary by fetching an inner memory reference. */
8188 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
8189 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != BLKmode
8190 && handled_component_p (TREE_OPERAND (exp, 0)))
8192 enum machine_mode mode1;
8193 HOST_WIDE_INT bitsize, bitpos;
8198 = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, &bitpos,
8199 &offset, &mode1, &unsignedp, &volatilep,
8203 /* ??? We should work harder and deal with non-zero offsets. */
8205 && (bitpos % BITS_PER_UNIT) == 0
8207 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
8209 /* See the normal_inner_ref case for the rationale. */
8212 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8213 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8215 && modifier != EXPAND_STACK_PARM
8216 ? target : NULL_RTX),
8218 (modifier == EXPAND_INITIALIZER
8219 || modifier == EXPAND_CONST_ADDRESS
8220 || modifier == EXPAND_STACK_PARM)
8221 ? modifier : EXPAND_NORMAL);
8223 if (MEM_P (orig_op0))
8227 /* Get a reference to just this component. */
8228 if (modifier == EXPAND_CONST_ADDRESS
8229 || modifier == EXPAND_SUM
8230 || modifier == EXPAND_INITIALIZER)
8231 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
8233 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
8235 if (op0 == orig_op0)
8236 op0 = copy_rtx (op0);
8238 set_mem_attributes (op0, TREE_OPERAND (exp, 0), 0);
8239 if (REG_P (XEXP (op0, 0)))
8240 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8242 MEM_VOLATILE_P (op0) |= volatilep;
8248 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8250 /* If the input and output modes are both the same, we are done. */
8251 if (mode == GET_MODE (op0))
8253 /* If neither mode is BLKmode, and both modes are the same size
8254 then we can use gen_lowpart. */
8255 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
8256 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0)))
8258 if (GET_CODE (op0) == SUBREG)
8259 op0 = force_reg (GET_MODE (op0), op0);
8260 op0 = gen_lowpart (mode, op0);
8262 /* If both modes are integral, then we can convert from one to the
8264 else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
8265 op0 = convert_modes (mode, GET_MODE (op0), op0,
8266 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8267 /* As a last resort, spill op0 to memory, and reload it in a
8269 else if (!MEM_P (op0))
8271 /* If the operand is not a MEM, force it into memory. Since we
8272 are going to be changing the mode of the MEM, don't call
8273 force_const_mem for constants because we don't allow pool
8274 constants to change mode. */
8275 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8277 gcc_assert (!TREE_ADDRESSABLE (exp));
8279 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8281 = assign_stack_temp_for_type
8282 (TYPE_MODE (inner_type),
8283 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8285 emit_move_insn (target, op0);
8289 /* At this point, OP0 is in the correct mode. If the output type is
8290 such that the operand is known to be aligned, indicate that it is.
8291 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
8295 op0 = copy_rtx (op0);
8297 if (TYPE_ALIGN_OK (type))
8298 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8299 else if (STRICT_ALIGNMENT
8301 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
8303 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8304 HOST_WIDE_INT temp_size
8305 = MAX (int_size_in_bytes (inner_type),
8306 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
8308 = assign_stack_temp_for_type (mode, temp_size, 0, type);
8309 rtx new_with_op0_mode
8310 = adjust_address (new_rtx, GET_MODE (op0), 0);
8312 gcc_assert (!TREE_ADDRESSABLE (exp));
8314 if (GET_MODE (op0) == BLKmode)
8315 emit_block_move (new_with_op0_mode, op0,
8316 GEN_INT (GET_MODE_SIZE (mode)),
8317 (modifier == EXPAND_STACK_PARM
8318 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8320 emit_move_insn (new_with_op0_mode, op0);
8325 op0 = adjust_address (op0, mode, 0);
8330 case POINTER_PLUS_EXPR:
8331 /* Even though the sizetype mode and the pointer's mode can be different,
8332 expand is able to handle this correctly and get the correct result out
8333 of the PLUS_EXPR code. */
8334 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8335 if sizetype precision is smaller than pointer precision. */
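/* As an illustration (hypothetical target): with a 24-bit sizetype and
32-bit pointers, an offset of (sizetype) -4 must reach the PLUS as the
sign-extended (ssizetype) -4, not as the zero-extended constant
0xfffffc, or the pointer would move forward instead of backward. */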
8336 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8337 exp = build2 (PLUS_EXPR, type,
8338 TREE_OPERAND (exp, 0),
8340 fold_convert (ssizetype,
8341 TREE_OPERAND (exp, 1))));
8344 /* Check if this is a case for multiplication and addition. */
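/* Illustrative source fragment (hypothetical; the named pattern, e.g.
maddhisi4, depends on the target): with 16-bit shorts and 32-bit ints,

short a, b; int c, d;
d = (int) a * (int) b + c;

can be expanded via smadd_widen_optab as a single widening
multiply-accumulate instead of a widening multiply followed by an add. */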
8345 if ((TREE_CODE (type) == INTEGER_TYPE
8346 || TREE_CODE (type) == FIXED_POINT_TYPE)
8347 && (subexp0_def = get_def_for_expr (TREE_OPERAND (exp, 0),
8350 tree subsubexp0, subsubexp1;
8351 gimple subsubexp0_def, subsubexp1_def;
8352 enum tree_code this_code;
8354 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8355 : FIXED_CONVERT_EXPR;
8356 subsubexp0 = gimple_assign_rhs1 (subexp0_def);
8357 subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
8358 subsubexp1 = gimple_assign_rhs2 (subexp0_def);
8359 subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
8360 if (subsubexp0_def && subsubexp1_def
8361 && (top0 = gimple_assign_rhs1 (subsubexp0_def))
8362 && (top1 = gimple_assign_rhs1 (subsubexp1_def))
8363 && (TYPE_PRECISION (TREE_TYPE (top0))
8364 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8365 && (TYPE_PRECISION (TREE_TYPE (top0))
8366 == TYPE_PRECISION (TREE_TYPE (top1)))
8367 && (TYPE_UNSIGNED (TREE_TYPE (top0))
8368 == TYPE_UNSIGNED (TREE_TYPE (top1))))
8370 tree op0type = TREE_TYPE (top0);
8371 enum machine_mode innermode = TYPE_MODE (op0type);
8372 bool zextend_p = TYPE_UNSIGNED (op0type);
8373 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8375 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8377 this_optab = zextend_p ? usmadd_widen_optab
8378 : ssmadd_widen_optab;
8379 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8380 && (optab_handler (this_optab, mode)->insn_code
8381 != CODE_FOR_nothing))
8383 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
8385 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8386 VOIDmode, EXPAND_NORMAL);
8387 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8390 return REDUCE_BIT_FIELD (temp);
8395 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8396 something else, make sure we add the register to the constant and
8397 then to the other thing. This case can occur during strength
8398 reduction and doing it this way will produce better code if the
8399 frame pointer or argument pointer is eliminated.
8401 fold-const.c will ensure that the constant is always in the inner
8402 PLUS_EXPR, so the only case we need to do anything about is if
8403 sp, ap, or fp is our second argument, in which case we must swap
8404 the innermost first argument and our second argument. */
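/* For instance (illustrative only), (x + 4) + fp is rewritten here as
(fp + 4) + x, so that once the frame pointer is eliminated the constant
folds into a register-plus-offset address. */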
8406 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8407 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8408 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8409 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8410 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8411 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8413 tree t = TREE_OPERAND (exp, 1);
8415 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8416 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8419 /* If the result is to be ptr_mode and we are adding an integer to
8420 something, we might be forming a constant. So try to use
8421 plus_constant. If it produces a sum and we can't accept it,
8422 use force_operand. This allows P = &ARR[const] to generate
8423 efficient code on machines where a SYMBOL_REF is not a valid
8426 If this is an EXPAND_SUM call, always return the sum. */
8427 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8428 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8430 if (modifier == EXPAND_STACK_PARM)
8432 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8433 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8434 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8438 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8440 /* Use immed_double_const to ensure that the constant is
8441 truncated according to the mode of OP1, then sign extended
8442 to a HOST_WIDE_INT. Using the constant directly can result
8443 in non-canonical RTL in a 64x32 cross compile. */
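/* E.g. on a 64-bit host targeting a 32-bit machine, the SImode constant
0x80000000 must appear as the sign-extended (const_int -0x80000000);
using the raw low bits would produce non-canonical RTL. */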
8445 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8447 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8448 op1 = plus_constant (op1, INTVAL (constant_part));
8449 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8450 op1 = force_operand (op1, target);
8451 return REDUCE_BIT_FIELD (op1);
8454 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8455 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8456 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8460 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8461 (modifier == EXPAND_INITIALIZER
8462 ? EXPAND_INITIALIZER : EXPAND_SUM));
8463 if (! CONSTANT_P (op0))
8465 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8466 VOIDmode, modifier);
8467 /* Return a PLUS if modifier says it's OK. */
8468 if (modifier == EXPAND_SUM
8469 || modifier == EXPAND_INITIALIZER)
8470 return simplify_gen_binary (PLUS, mode, op0, op1);
8473 /* Use immed_double_const to ensure that the constant is
8474 truncated according to the mode of OP1, then sign extended
8475 to a HOST_WIDE_INT. Using the constant directly can result
8476 in non-canonical RTL in a 64x32 cross compile. */
8478 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8480 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8481 op0 = plus_constant (op0, INTVAL (constant_part));
8482 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8483 op0 = force_operand (op0, target);
8484 return REDUCE_BIT_FIELD (op0);
8488 /* No sense saving up arithmetic to be done
8489 if it's all in the wrong mode to form part of an address.
8490 And force_operand won't know whether to sign-extend or zero-extend. */
8492 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8493 || mode != ptr_mode)
8495 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8496 subtarget, &op0, &op1, EXPAND_NORMAL);
8497 if (op0 == const0_rtx)
8499 if (op1 == const0_rtx)
8504 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8505 subtarget, &op0, &op1, modifier);
8506 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8509 /* Check if this is a case for multiplication and subtraction. */
8510 if ((TREE_CODE (type) == INTEGER_TYPE
8511 || TREE_CODE (type) == FIXED_POINT_TYPE)
8512 && (subexp1_def = get_def_for_expr (TREE_OPERAND (exp, 1),
8515 tree subsubexp0, subsubexp1;
8516 gimple subsubexp0_def, subsubexp1_def;
8517 enum tree_code this_code;
8519 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8520 : FIXED_CONVERT_EXPR;
8521 subsubexp0 = gimple_assign_rhs1 (subexp1_def);
8522 subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
8523 subsubexp1 = gimple_assign_rhs2 (subexp1_def);
8524 subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
8525 if (subsubexp0_def && subsubexp1_def
8526 && (top0 = gimple_assign_rhs1 (subsubexp0_def))
8527 && (top1 = gimple_assign_rhs1 (subsubexp1_def))
8528 && (TYPE_PRECISION (TREE_TYPE (top0))
8529 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8530 && (TYPE_PRECISION (TREE_TYPE (top0))
8531 == TYPE_PRECISION (TREE_TYPE (top1)))
8532 && (TYPE_UNSIGNED (TREE_TYPE (top0))
8533 == TYPE_UNSIGNED (TREE_TYPE (top1))))
8535 tree op0type = TREE_TYPE (top0);
8536 enum machine_mode innermode = TYPE_MODE (op0type);
8537 bool zextend_p = TYPE_UNSIGNED (op0type);
8538 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8540 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8542 this_optab = zextend_p ? usmsub_widen_optab
8543 : ssmsub_widen_optab;
8544 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8545 && (optab_handler (this_optab, mode)->insn_code
8546 != CODE_FOR_nothing))
8548 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
8550 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8551 VOIDmode, EXPAND_NORMAL);
8552 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8555 return REDUCE_BIT_FIELD (temp);
8560 /* For initializers, we are allowed to return a MINUS of two
8561 symbolic constants. Here we handle all cases when both operands
8563 /* Handle difference of two symbolic constants,
8564 for the sake of an initializer. */
8565 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8566 && really_constant_p (TREE_OPERAND (exp, 0))
8567 && really_constant_p (TREE_OPERAND (exp, 1)))
8569 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8570 NULL_RTX, &op0, &op1, modifier);
8572 /* If the last operand is a CONST_INT, use plus_constant of
8573 the negated constant. Else make the MINUS. */
8574 if (GET_CODE (op1) == CONST_INT)
8575 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8577 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8580 /* No sense saving up arithmetic to be done
8581 if it's all in the wrong mode to form part of an address.
8582 And force_operand won't know whether to sign-extend or zero-extend. */
8584 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8585 || mode != ptr_mode)
8588 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8589 subtarget, &op0, &op1, modifier);
8591 /* Convert A - const to A + (-const). */
8592 if (GET_CODE (op1) == CONST_INT)
8594 op1 = negate_rtx (mode, op1);
8595 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8601 /* If this is a fixed-point operation, then we cannot use the code
8602 below because "expand_mult" doesn't support sat/no-sat fixed-point
8604 if (ALL_FIXED_POINT_MODE_P (mode))
8607 /* If first operand is constant, swap them.
8608 Thus the following special case checks need only
8609 check the second operand. */
8610 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8612 tree t1 = TREE_OPERAND (exp, 0);
8613 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8614 TREE_OPERAND (exp, 1) = t1;
8617 /* Attempt to return something suitable for generating an
8618 indexed address, for machines that support that. */
8620 if (modifier == EXPAND_SUM && mode == ptr_mode
8621 && host_integerp (TREE_OPERAND (exp, 1), 0))
8623 tree exp1 = TREE_OPERAND (exp, 1);
8625 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8629 op0 = force_operand (op0, NULL_RTX);
8631 op0 = copy_to_mode_reg (mode, op0);
8633 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8634 gen_int_mode (tree_low_cst (exp1, 0),
8635 TYPE_MODE (TREE_TYPE (exp1)))));
8638 if (modifier == EXPAND_STACK_PARM)
8641 /* Check for multiplying things that have been extended
8642 from a narrower type. If this machine supports multiplying
8643 in that narrower type with a result in the desired type,
8644 do it that way, and avoid the explicit type-conversion. */
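/* Illustrative fragments (hypothetical; pattern availability is
target-specific): with HImode shorts and SImode ints,

short a, b;
int p = (int) a * (int) b;

can use mulhisi3 (smul_widen_optab) directly, while

unsigned short c; short d;
int q = (int) c * (int) d;

can use usmulhisi3 (usmul_widen_optab), avoiding the explicit
extensions. */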
8646 subexp0 = TREE_OPERAND (exp, 0);
8647 subexp1 = TREE_OPERAND (exp, 1);
8648 subexp0_def = get_def_for_expr (subexp0, NOP_EXPR);
8649 subexp1_def = get_def_for_expr (subexp1, NOP_EXPR);
8650 top0 = top1 = NULL_TREE;
8652 /* First, check if we have a multiplication of one signed and one
8653 unsigned operand. */
8655 && (top0 = gimple_assign_rhs1 (subexp0_def))
8657 && (top1 = gimple_assign_rhs1 (subexp1_def))
8658 && TREE_CODE (type) == INTEGER_TYPE
8659 && (TYPE_PRECISION (TREE_TYPE (top0))
8660 < TYPE_PRECISION (TREE_TYPE (subexp0)))
8661 && (TYPE_PRECISION (TREE_TYPE (top0))
8662 == TYPE_PRECISION (TREE_TYPE (top1)))
8663 && (TYPE_UNSIGNED (TREE_TYPE (top0))
8664 != TYPE_UNSIGNED (TREE_TYPE (top1))))
8666 enum machine_mode innermode
8667 = TYPE_MODE (TREE_TYPE (top0));
8668 this_optab = usmul_widen_optab;
8669 if (mode == GET_MODE_WIDER_MODE (innermode))
8671 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8673 if (TYPE_UNSIGNED (TREE_TYPE (top0)))
8674 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
8677 expand_operands (top0, top1, NULL_RTX, &op1, &op0,
8684 /* Check for a multiplication with matching signedness. If
8685 valid, TOP0 and TOP1 were set in the previous if
8688 && TREE_CODE (type) == INTEGER_TYPE
8689 && (TYPE_PRECISION (TREE_TYPE (top0))
8690 < TYPE_PRECISION (TREE_TYPE (subexp0)))
8691 && ((TREE_CODE (subexp1) == INTEGER_CST
8692 && int_fits_type_p (subexp1, TREE_TYPE (top0))
8693 /* Don't use a widening multiply if a shift will do. */
8694 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1)))
8695 > HOST_BITS_PER_WIDE_INT)
8696 || exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0))
8699 && (TYPE_PRECISION (TREE_TYPE (top1))
8700 == TYPE_PRECISION (TREE_TYPE (top0))
8701 /* If both operands are extended, they must either both
8702 be zero-extended or both be sign-extended. */
8703 && (TYPE_UNSIGNED (TREE_TYPE (top1))
8704 == TYPE_UNSIGNED (TREE_TYPE (top0)))))))
8706 tree op0type = TREE_TYPE (top0);
8707 enum machine_mode innermode = TYPE_MODE (op0type);
8708 bool zextend_p = TYPE_UNSIGNED (op0type);
8709 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8710 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8712 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8714 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8716 if (TREE_CODE (subexp1) == INTEGER_CST)
8717 expand_operands (top0, subexp1, NULL_RTX, &op0, &op1,
8720 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
8724 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8725 && innermode == word_mode)
8728 op0 = expand_normal (top0);
8729 if (TREE_CODE (subexp1) == INTEGER_CST)
8730 op1 = convert_modes (innermode, mode,
8731 expand_normal (subexp1), unsignedp);
8733 op1 = expand_normal (top1);
8734 temp = expand_binop (mode, other_optab, op0, op1, target,
8735 unsignedp, OPTAB_LIB_WIDEN);
8736 hipart = gen_highpart (innermode, temp);
8737 htem = expand_mult_highpart_adjust (innermode, hipart,
8741 emit_move_insn (hipart, htem);
8742 return REDUCE_BIT_FIELD (temp);
8746 expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL);
8747 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8749 case TRUNC_DIV_EXPR:
8750 case FLOOR_DIV_EXPR:
8752 case ROUND_DIV_EXPR:
8753 case EXACT_DIV_EXPR:
8754 /* If this is a fixed-point operation, then we cannot use the code
8755 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8757 if (ALL_FIXED_POINT_MODE_P (mode))
8760 if (modifier == EXPAND_STACK_PARM)
8762 /* Possible optimization: compute the dividend with EXPAND_SUM;
8763 then, if the divisor is constant, we can optimize the case
8764 where some terms of the dividend have coefficients divisible by it. */
8765 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8766 subtarget, &op0, &op1, EXPAND_NORMAL);
8767 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8772 case TRUNC_MOD_EXPR:
8773 case FLOOR_MOD_EXPR:
8775 case ROUND_MOD_EXPR:
8776 if (modifier == EXPAND_STACK_PARM)
8778 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8779 subtarget, &op0, &op1, EXPAND_NORMAL);
8780 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8782 case FIXED_CONVERT_EXPR:
8783 op0 = expand_normal (TREE_OPERAND (exp, 0));
8784 if (target == 0 || modifier == EXPAND_STACK_PARM)
8785 target = gen_reg_rtx (mode);
8787 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8788 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8789 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8790 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8792 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8795 case FIX_TRUNC_EXPR:
8796 op0 = expand_normal (TREE_OPERAND (exp, 0));
8797 if (target == 0 || modifier == EXPAND_STACK_PARM)
8798 target = gen_reg_rtx (mode);
8799 expand_fix (target, op0, unsignedp);
8803 op0 = expand_normal (TREE_OPERAND (exp, 0));
8804 if (target == 0 || modifier == EXPAND_STACK_PARM)
8805 target = gen_reg_rtx (mode);
8806 /* expand_float can't figure out what to do if FROM has VOIDmode.
8807 So give it the correct mode. With -O, cse will optimize this. */
8808 if (GET_MODE (op0) == VOIDmode)
8809 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8811 expand_float (target, op0,
8812 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8816 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8817 VOIDmode, EXPAND_NORMAL);
8818 if (modifier == EXPAND_STACK_PARM)
8820 temp = expand_unop (mode,
8821 optab_for_tree_code (NEGATE_EXPR, type,
8825 return REDUCE_BIT_FIELD (temp);
8828 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8829 VOIDmode, EXPAND_NORMAL);
8830 if (modifier == EXPAND_STACK_PARM)
8833 /* ABS_EXPR is not valid for complex arguments. */
8834 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8835 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8837 /* Unsigned abs is simply the operand. Testing here means we don't
8838 risk generating incorrect code below. */
8839 if (TYPE_UNSIGNED (type))
8842 return expand_abs (mode, op0, target, unsignedp,
8843 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8847 target = original_target;
8849 || modifier == EXPAND_STACK_PARM
8850 || (MEM_P (target) && MEM_VOLATILE_P (target))
8851 || GET_MODE (target) != mode
8853 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8854 target = gen_reg_rtx (mode);
8855 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8856 target, &op0, &op1, EXPAND_NORMAL);
8858 /* First try to do it with a special MIN or MAX instruction.
8859 If that does not win, use a conditional jump to select the proper
8861 this_optab = optab_for_tree_code (code, type, optab_default);
8862 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8867 /* At this point, a MEM target is no longer useful; we will get better
8870 if (! REG_P (target))
8871 target = gen_reg_rtx (mode);
8873 /* If op1 was placed in target, swap op0 and op1. */
8874 if (target != op0 && target == op1)
8881 /* We generate better code and avoid problems with op1 mentioning
8882 target by forcing op1 into a pseudo if it isn't a constant. */
8883 if (! CONSTANT_P (op1))
8884 op1 = force_reg (mode, op1);
8887 enum rtx_code comparison_code;
8890 if (code == MAX_EXPR)
8891 comparison_code = unsignedp ? GEU : GE;
8893 comparison_code = unsignedp ? LEU : LE;
8895 /* Canonicalize to comparisons against 0. */
8896 if (op1 == const1_rtx)
8898 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8899 or (a != 0 ? a : 1) for unsigned.
8900 For MIN we are safe converting (a <= 1 ? a : 1)
8901 into (a <= 0 ? a : 1) */
8902 cmpop1 = const0_rtx;
8903 if (code == MAX_EXPR)
8904 comparison_code = unsignedp ? NE : GT;
8906 if (op1 == constm1_rtx && !unsignedp)
8908 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8909 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8910 cmpop1 = const0_rtx;
8911 if (code == MIN_EXPR)
8912 comparison_code = LT;
8914 #ifdef HAVE_conditional_move
8915 /* Use a conditional move if possible. */
8916 if (can_conditionally_move_p (mode))
8920 /* ??? Same problem as in expmed.c: emit_conditional_move
8921 forces a stack adjustment via compare_from_rtx, and we
8922 lose the stack adjustment if the sequence we are about
8923 to create is discarded. */
8924 do_pending_stack_adjust ();
8928 /* Try to emit the conditional move. */
8929 insn = emit_conditional_move (target, comparison_code,
8934 /* If we could do the conditional move, emit the sequence,
8938 rtx seq = get_insns ();
8944 /* Otherwise discard the sequence and fall back to code with
8950 emit_move_insn (target, op0);
8952 temp = gen_label_rtx ();
8953 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8954 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8956 emit_move_insn (target, op1);
8961 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8962 VOIDmode, EXPAND_NORMAL);
8963 if (modifier == EXPAND_STACK_PARM)
8965 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8969 /* ??? Can optimize bitwise operations with one arg constant.
8970 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8971 and (a bitwise1 b) bitwise2 b (etc)
8972 but that is probably not worthwhile. */
8974 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8975 boolean values when we want in all cases to compute both of them. In
8976 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8977 as actual zero-or-1 values and then bitwise anding. In cases where
8978 there cannot be any side effects, better code would be made by
8979 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8980 how to recognize those cases. */
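/* For example, in f () && g () (TRUTH_ANDIF_EXPR), g () must not be
called when f () returns 0, whereas a TRUTH_AND_EXPR of two
zero-or-one values may evaluate both operands and simply AND them. */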
8982 case TRUTH_AND_EXPR:
8983 code = BIT_AND_EXPR;
8988 code = BIT_IOR_EXPR;
8992 case TRUTH_XOR_EXPR:
8993 code = BIT_XOR_EXPR;
8999 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9000 || (GET_MODE_PRECISION (TYPE_MODE (type))
9001 == TYPE_PRECISION (type)));
9006 /* If this is a fixed-point operation, then we cannot use the code
9007 below because "expand_shift" doesn't support sat/no-sat fixed-point
9009 if (ALL_FIXED_POINT_MODE_P (mode))
9012 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9014 if (modifier == EXPAND_STACK_PARM)
9016 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
9017 VOIDmode, EXPAND_NORMAL);
9018 temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
9020 if (code == LSHIFT_EXPR)
9021 temp = REDUCE_BIT_FIELD (temp);
9024 /* Could determine the answer when only additive constants differ. Also,
9025 the addition of one can be handled by changing the condition. */
9032 case UNORDERED_EXPR:
9040 temp = do_store_flag (exp,
9041 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9042 tmode != VOIDmode ? tmode : mode);
9046 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
9047 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
9049 && REG_P (original_target)
9050 && (GET_MODE (original_target)
9051 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9053 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
9054 VOIDmode, EXPAND_NORMAL);
9056 /* If temp is constant, we can just compute the result. */
9057 if (GET_CODE (temp) == CONST_INT)
9059 if (INTVAL (temp) != 0)
9060 emit_move_insn (target, const1_rtx);
9062 emit_move_insn (target, const0_rtx);
9067 if (temp != original_target)
9069 enum machine_mode mode1 = GET_MODE (temp);
9070 if (mode1 == VOIDmode)
9071 mode1 = tmode != VOIDmode ? tmode : mode;
9073 temp = copy_to_mode_reg (mode1, temp);
9076 op1 = gen_label_rtx ();
9077 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
9078 GET_MODE (temp), unsignedp, op1);
9079 emit_move_insn (temp, const1_rtx);
9084 /* If no set-flag instruction, must generate a conditional store
9085 into a temporary variable. Drop through and handle this like && and ||. */
9087 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9088 are occasionally created by folding during expansion. */
9089 case TRUTH_ANDIF_EXPR:
9090 case TRUTH_ORIF_EXPR:
9093 || modifier == EXPAND_STACK_PARM
9094 || ! safe_from_p (target, exp, 1)
9095 /* Make sure we don't have a hard reg (such as function's return
9096 value) live across basic blocks, if not optimizing. */
9097 || (!optimize && REG_P (target)
9098 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9099 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9102 emit_move_insn (target, const0_rtx);
9104 op1 = gen_label_rtx ();
9105 jumpifnot (exp, op1);
9108 emit_move_insn (target, const1_rtx);
9111 return ignore ? const0_rtx : target;
9113 case TRUTH_NOT_EXPR:
9114 if (modifier == EXPAND_STACK_PARM)
9116 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
9117 VOIDmode, EXPAND_NORMAL);
9118 /* The parser is careful to generate TRUTH_NOT_EXPR
9119 only with operands that are always zero or one. */
9120 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
9121 target, 1, OPTAB_LIB_WIDEN);
9125 case STATEMENT_LIST:
9127 tree_stmt_iterator iter;
9129 gcc_assert (ignore);
9131 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9132 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9137 /* A COND_EXPR with its type being VOID_TYPE represents a
9138 conditional jump and is handled in
9139 expand_gimple_cond_expr. */
9140 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9142 /* Note that COND_EXPRs whose type is a structure or union
9143 are required to be constructed to contain assignments of
9144 a temporary variable, so that we can evaluate them here
9145 for side effect only. If type is void, we must do likewise. */
9147 gcc_assert (!TREE_ADDRESSABLE (type)
9149 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9150 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9152 /* If we are not to produce a result, we have no target. Otherwise,
9153 if a target was specified use it; it will not be used as an
9154 intermediate target unless it is safe. If no target, use a
9157 if (modifier != EXPAND_STACK_PARM
9159 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9160 && GET_MODE (original_target) == mode
9161 #ifdef HAVE_conditional_move
9162 && (! can_conditionally_move_p (mode)
9163 || REG_P (original_target))
9165 && !MEM_P (original_target))
9166 temp = original_target;
9168 temp = assign_temp (type, 0, 0, 1);
9170 do_pending_stack_adjust ();
9172 op0 = gen_label_rtx ();
9173 op1 = gen_label_rtx ();
9174 jumpifnot (TREE_OPERAND (exp, 0), op0);
9175 store_expr (TREE_OPERAND (exp, 1), temp,
9176 modifier == EXPAND_STACK_PARM,
9179 emit_jump_insn (gen_jump (op1));
9182 store_expr (TREE_OPERAND (exp, 2), temp,
9183 modifier == EXPAND_STACK_PARM,
9191 target = expand_vec_cond_expr (exp, target);
9196 tree lhs = TREE_OPERAND (exp, 0);
9197 tree rhs = TREE_OPERAND (exp, 1);
9198 gcc_assert (ignore);
9200 /* Check for |= or &= of a bitfield of size one into another bitfield
9201 of size 1. In this case, (unless we need the result of the
9202 assignment) we can do this more efficiently with a
9203 test followed by an assignment, if necessary.
9205 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9206 things change so we do, this code should be enhanced to support it. */
9208 if (TREE_CODE (lhs) == COMPONENT_REF
9209 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9210 || TREE_CODE (rhs) == BIT_AND_EXPR)
9211 && TREE_OPERAND (rhs, 0) == lhs
9212 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9213 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9214 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9216 rtx label = gen_label_rtx ();
9217 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9218 do_jump (TREE_OPERAND (rhs, 1),
9221 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9222 MOVE_NONTEMPORAL (exp));
9223 do_pending_stack_adjust ();
9228 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9233 if (!TREE_OPERAND (exp, 0))
9234 expand_null_return ();
9236 expand_return (TREE_OPERAND (exp, 0));
9240 return expand_expr_addr_expr (exp, target, tmode, modifier);
9243 /* Get the rtx code of the operands. */
9244 op0 = expand_normal (TREE_OPERAND (exp, 0));
9245 op1 = expand_normal (TREE_OPERAND (exp, 1));
9248 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9250 /* Move the real (op0) and imaginary (op1) parts to their location. */
9251 write_complex_part (target, op0, false);
9252 write_complex_part (target, op1, true);
9257 op0 = expand_normal (TREE_OPERAND (exp, 0));
9258 return read_complex_part (op0, false);
9261 op0 = expand_normal (TREE_OPERAND (exp, 0));
9262 return read_complex_part (op0, true);
9265 expand_resx_expr (exp);
9268 case TRY_CATCH_EXPR:
9270 case EH_FILTER_EXPR:
9271 case TRY_FINALLY_EXPR:
9272 /* Lowered by tree-eh.c. */
9275 case WITH_CLEANUP_EXPR:
9276 case CLEANUP_POINT_EXPR:
9278 case CASE_LABEL_EXPR:
9284 case PREINCREMENT_EXPR:
9285 case PREDECREMENT_EXPR:
9286 case POSTINCREMENT_EXPR:
9287 case POSTDECREMENT_EXPR:
9290 /* Lowered by gimplify.c. */
9293 case CHANGE_DYNAMIC_TYPE_EXPR:
9294 /* This is ignored at the RTL level. The tree level set
9295 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
9296 overkill for the RTL layer but is all that we can
9301 return get_exception_pointer ();
9304 return get_exception_filter ();
9307 /* Function descriptors are not valid except as
9308 initialization constants, and should not be expanded. */
9316 expand_label (TREE_OPERAND (exp, 0));
9320 expand_asm_expr (exp);
9323 case WITH_SIZE_EXPR:
9324 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9325 have pulled out the size to use in whatever context it needed. */
9326 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9329 case REALIGN_LOAD_EXPR:
9331 tree oprnd0 = TREE_OPERAND (exp, 0);
9332 tree oprnd1 = TREE_OPERAND (exp, 1);
9333 tree oprnd2 = TREE_OPERAND (exp, 2);
9336 this_optab = optab_for_tree_code (code, type, optab_default);
9337 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9338 op2 = expand_normal (oprnd2);
9339 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9347 tree oprnd0 = TREE_OPERAND (exp, 0);
9348 tree oprnd1 = TREE_OPERAND (exp, 1);
9349 tree oprnd2 = TREE_OPERAND (exp, 2);
9352 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9353 op2 = expand_normal (oprnd2);
9354 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9359 case WIDEN_SUM_EXPR:
9361 tree oprnd0 = TREE_OPERAND (exp, 0);
9362 tree oprnd1 = TREE_OPERAND (exp, 1);
9364 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9365 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9370 case REDUC_MAX_EXPR:
9371 case REDUC_MIN_EXPR:
9372 case REDUC_PLUS_EXPR:
9374 op0 = expand_normal (TREE_OPERAND (exp, 0));
9375 this_optab = optab_for_tree_code (code, type, optab_default);
9376 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9381 case VEC_EXTRACT_EVEN_EXPR:
9382 case VEC_EXTRACT_ODD_EXPR:
9384 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9385 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9386 this_optab = optab_for_tree_code (code, type, optab_default);
9387 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9393 case VEC_INTERLEAVE_HIGH_EXPR:
9394 case VEC_INTERLEAVE_LOW_EXPR:
9396 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9397 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9398 this_optab = optab_for_tree_code (code, type, optab_default);
9399 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9405 case VEC_LSHIFT_EXPR:
9406 case VEC_RSHIFT_EXPR:
9408 target = expand_vec_shift_expr (exp, target);
9412 case VEC_UNPACK_HI_EXPR:
9413 case VEC_UNPACK_LO_EXPR:
9415 op0 = expand_normal (TREE_OPERAND (exp, 0));
9416 this_optab = optab_for_tree_code (code, type, optab_default);
9417 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9423 case VEC_UNPACK_FLOAT_HI_EXPR:
9424 case VEC_UNPACK_FLOAT_LO_EXPR:
9426 op0 = expand_normal (TREE_OPERAND (exp, 0));
9427 /* The signedness is determined from input operand. */
9428 this_optab = optab_for_tree_code (code,
9429 TREE_TYPE (TREE_OPERAND (exp, 0)),
9431 temp = expand_widen_pattern_expr
9432 (exp, op0, NULL_RTX, NULL_RTX,
9433 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9439 case VEC_WIDEN_MULT_HI_EXPR:
9440 case VEC_WIDEN_MULT_LO_EXPR:
9442 tree oprnd0 = TREE_OPERAND (exp, 0);
9443 tree oprnd1 = TREE_OPERAND (exp, 1);
9445 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9446 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9448 gcc_assert (target);
9452 case VEC_PACK_TRUNC_EXPR:
9453 case VEC_PACK_SAT_EXPR:
9454 case VEC_PACK_FIX_TRUNC_EXPR:
9455 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9458 case COMPOUND_LITERAL_EXPR:
9460 /* Initialize the anonymous variable declared in the compound
9461 literal, then return the variable. */
9462 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9464 /* Create RTL for this variable. */
9465 if (!DECL_RTL_SET_P (decl))
9467 if (DECL_HARD_REGISTER (decl))
9468 /* The user specified an assembler name for this variable. Set that up now. */
9470 rest_of_decl_compilation (decl, 0, 0);
9475 return expand_expr_real (decl, original_target, tmode,
9483 /* Here to do an ordinary binary operator. */
9485 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9486 subtarget, &op0, &op1, EXPAND_NORMAL);
9488 this_optab = optab_for_tree_code (code, type, optab_default);
9490 if (modifier == EXPAND_STACK_PARM)
9492 temp = expand_binop (mode, this_optab, op0, op1, target,
9493 unsignedp, OPTAB_LIB_WIDEN);
9495 return REDUCE_BIT_FIELD (temp);
9497 #undef REDUCE_BIT_FIELD
9499 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9500 signedness of TYPE), possibly returning the result in TARGET. */
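/* For example (illustrative, assuming 32-bit SImode): reducing to a
6-bit unsigned type masks with 0x3f, while reducing to a 6-bit signed
type shifts left by 26 and arithmetic-shifts right by 26 so the sign
bit is replicated. */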
9502 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9504 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9505 if (target && GET_MODE (target) != GET_MODE (exp))
9507 /* For constant values, reduce using build_int_cst_type. */
9508 if (GET_CODE (exp) == CONST_INT)
9510 HOST_WIDE_INT value = INTVAL (exp);
9511 tree t = build_int_cst_type (type, value);
9512 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9514 else if (TYPE_UNSIGNED (type))
9517 if (prec < HOST_BITS_PER_WIDE_INT)
9518 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9521 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9522 ((unsigned HOST_WIDE_INT) 1
9523 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9525 return expand_and (GET_MODE (exp), exp, mask, target);
9529 tree count = build_int_cst (NULL_TREE,
9530 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9531 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9532 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9536 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9537 when applied to the address of EXP produces an address known to be
9538 aligned more than BIGGEST_ALIGNMENT. */
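/* The offsets recognized here have the form

(sizetype) (- (sizetype) &EXP) & (ALIGN - 1)

possibly wrapped in conversions: the amount that must be added to
EXP's address to round it up to a power-of-2 ALIGN larger than
BIGGEST_ALIGNMENT. */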
9541 is_aligning_offset (const_tree offset, const_tree exp)
9543 /* Strip off any conversions. */
9544 while (CONVERT_EXPR_P (offset))
9545 offset = TREE_OPERAND (offset, 0);
9547 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9548 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9549 if (TREE_CODE (offset) != BIT_AND_EXPR
9550 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9551 || compare_tree_int (TREE_OPERAND (offset, 1),
9552 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9553 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9556 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9557 It must be NEGATE_EXPR. Then strip any more conversions. */
9558 offset = TREE_OPERAND (offset, 0);
9559 while (CONVERT_EXPR_P (offset))
9560 offset = TREE_OPERAND (offset, 0);
9562 if (TREE_CODE (offset) != NEGATE_EXPR)
9565 offset = TREE_OPERAND (offset, 0);
9566 while (CONVERT_EXPR_P (offset))
9567 offset = TREE_OPERAND (offset, 0);
9569 /* This must now be the address of EXP. */
9570 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9573 /* Return the tree node if ARG corresponds to a string constant, or zero
9574 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9575 in bytes within the string that ARG is accessing. The type of the
9576 offset will be `sizetype'. */
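/* For example, for ARG of the form &"hello"[2] (an ADDR_EXPR of an
ARRAY_REF of a STRING_CST), the STRING_CST "hello" is returned and
*PTR_OFFSET is set to 2. */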
9579 string_constant (tree arg, tree *ptr_offset)
9581 tree array, offset, lower_bound;
9584 if (TREE_CODE (arg) == ADDR_EXPR)
9586 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9588 *ptr_offset = size_zero_node;
9589 return TREE_OPERAND (arg, 0);
9591 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9593 array = TREE_OPERAND (arg, 0);
9594 offset = size_zero_node;
9596 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9598 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9599 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9600 if (TREE_CODE (array) != STRING_CST
9601 && TREE_CODE (array) != VAR_DECL)
9604 /* Check if the array has a nonzero lower bound. */
9605 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9606 if (!integer_zerop (lower_bound))
9608 /* If the offset and base aren't both constants, return 0. */
9609 if (TREE_CODE (lower_bound) != INTEGER_CST)
9611 if (TREE_CODE (offset) != INTEGER_CST)
9613 /* Adjust offset by the lower bound. */
9614 offset = size_diffop (fold_convert (sizetype, offset),
9615 fold_convert (sizetype, lower_bound));
9621 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9623 tree arg0 = TREE_OPERAND (arg, 0);
9624 tree arg1 = TREE_OPERAND (arg, 1);
9629 if (TREE_CODE (arg0) == ADDR_EXPR
9630 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9631 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9633 array = TREE_OPERAND (arg0, 0);
9636 else if (TREE_CODE (arg1) == ADDR_EXPR
9637 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9638 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9640 array = TREE_OPERAND (arg1, 0);
9649 if (TREE_CODE (array) == STRING_CST)
9651 *ptr_offset = fold_convert (sizetype, offset);
9654 else if (TREE_CODE (array) == VAR_DECL)
9658 /* Variables initialized to string literals can be handled too. */
9659 if (DECL_INITIAL (array) == NULL_TREE
9660 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9663 /* If they are read-only, non-volatile and bind locally. */
9664 if (! TREE_READONLY (array)
9665 || TREE_SIDE_EFFECTS (array)
9666 || ! targetm.binds_local_p (array))
9669 /* Avoid const char foo[4] = "abcde"; */
9670 if (DECL_SIZE_UNIT (array) == NULL_TREE
9671 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9672 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9673 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9676 /* If the variable is bigger than the string literal, OFFSET must be constant
9677 and within the bounds of the string literal. */
9678 offset = fold_convert (sizetype, offset);
9679 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9680 && (! host_integerp (offset, 1)
9681 || compare_tree_int (offset, length) >= 0))
9684 *ptr_offset = offset;
9685 return DECL_INITIAL (array);
9691 /* Generate code to calculate EXP using a store-flag instruction
9692 and return an rtx for the result. EXP is either a comparison
9693 or a TRUTH_NOT_EXPR whose operand is a comparison.
9695 If TARGET is nonzero, store the result there if convenient.
9697 Return zero if there is no suitable set-flag instruction
9698 available on this machine.
9700 Once expand_expr has been called on the arguments of the comparison,
9701 we are committed to doing the store flag, since it is not safe to
9702 re-evaluate the expression. We emit the store-flag insn by calling
9703 emit_store_flag, but only expand the arguments if we have a reason
9704 to believe that emit_store_flag will be successful. If we think that
9705 it will, but it isn't, we have to simulate the store-flag with a
9706 set/jump/set sequence. */
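/* Schematically, the fallback sequence for target = (a < b) is:

(set (target) (const_int 1))
if (a < b) goto L;
(set (target) (const_int 0))
L:

with the two constants exchanged when INVERT is set. */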
9709 do_store_flag (tree exp, rtx target, enum machine_mode mode)
9712 tree arg0, arg1, type;
9714 enum machine_mode operand_mode;
9718 rtx subtarget = target;
9721 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9722 result at the end. We can't simply invert the test since it would
9723 have already been inverted if it were valid. This case occurs for
9724 some floating-point comparisons. */
9726 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9727 invert = 1, exp = TREE_OPERAND (exp, 0);
9729 arg0 = TREE_OPERAND (exp, 0);
9730 arg1 = TREE_OPERAND (exp, 1);
9732 /* Don't crash if the comparison was erroneous. */
9733 if (arg0 == error_mark_node || arg1 == error_mark_node)
9736 type = TREE_TYPE (arg0);
9737 operand_mode = TYPE_MODE (type);
9738 unsignedp = TYPE_UNSIGNED (type);
9740 /* We won't bother with BLKmode store-flag operations because it would mean
9741 passing a lot of information to emit_store_flag. */
9742 if (operand_mode == BLKmode)
9745 /* We won't bother with store-flag operations involving function pointers
9746 when function pointers must be canonicalized before comparisons. */
9747 #ifdef HAVE_canonicalize_funcptr_for_compare
9748 if (HAVE_canonicalize_funcptr_for_compare
9749 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9750 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9752 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9753 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9754 == FUNCTION_TYPE))))
9761 /* Get the rtx comparison code to use. We know that EXP is a comparison
9762 operation of some type. Some comparisons against 1 and -1 can be
9763 converted to comparisons with zero. Do so here so that the tests
9764 below will be aware that we have a comparison with zero. These
9765 tests will not catch constants in the first operand, but constants
9766 are rarely passed as the first operand. */
9768 switch (TREE_CODE (exp))
9777 if (integer_onep (arg1))
9778 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9780 code = unsignedp ? LTU : LT;
9783 if (! unsignedp && integer_all_onesp (arg1))
9784 arg1 = integer_zero_node, code = LT;
9786 code = unsignedp ? LEU : LE;
9789 if (! unsignedp && integer_all_onesp (arg1))
9790 arg1 = integer_zero_node, code = GE;
9792 code = unsignedp ? GTU : GT;
9795 if (integer_onep (arg1))
9796 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9798 code = unsignedp ? GEU : GE;
9801 case UNORDERED_EXPR:
9830 /* Put a constant second. */
9831 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9832 || TREE_CODE (arg0) == FIXED_CST)
9834 tem = arg0; arg0 = arg1; arg1 = tem;
9835 code = swap_condition (code);
9838 /* If this is an equality or inequality test of a single bit, we can
9839 do this by shifting the bit being tested to the low-order bit and
9840 masking the result with the constant 1. If the condition was EQ,
9841 we xor it with 1. This does not require an scc insn and is faster
9842 than an scc insn even if we have it.
9844 The code to make this transformation was moved into fold_single_bit_test,
9845 so we just call into the folder and expand its result. */
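/* For instance, (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0
becomes ((x >> 3) & 1) ^ 1. */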
9847 if ((code == NE || code == EQ)
9848 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9849 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9851 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9852 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9854 target, VOIDmode, EXPAND_NORMAL);
9857 /* Now see if we are likely to be able to do this. Return if not. */
9858 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9861 if (! get_subtarget (target)
9862 || GET_MODE (subtarget) != operand_mode)
9865 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
9868 target = gen_reg_rtx (mode);
9870 result = emit_store_flag (target, code, op0, op1,
9871 operand_mode, unsignedp, 1);
9876 result = expand_binop (mode, xor_optab, result, const1_rtx,
9877 result, 0, OPTAB_LIB_WIDEN);
9881 /* If this failed, we have to do this with set/compare/jump/set code. */
9883 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9884 target = gen_reg_rtx (GET_MODE (target));
9886 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9887 label = gen_label_rtx ();
9888 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9891 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9898 /* Stubs in case we haven't got a casesi insn. */
9900 # define HAVE_casesi 0
9901 # define gen_casesi(a, b, c, d, e) (0)
9902 # define CODE_FOR_casesi CODE_FOR_nothing
9905 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9906 0 otherwise (i.e. if there is no casesi instruction). */
9908 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9909 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
9910 rtx fallback_label ATTRIBUTE_UNUSED)
9912 enum machine_mode index_mode = SImode;
9913 int index_bits = GET_MODE_BITSIZE (index_mode);
9914 rtx op1, op2, index;
9915 enum machine_mode op_mode;
9920 /* Convert the index to SImode. */
9921 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9923 enum machine_mode omode = TYPE_MODE (index_type);
9924 rtx rangertx = expand_normal (range);
9926 /* We must handle the endpoints in the original mode. */
9927 index_expr = build2 (MINUS_EXPR, index_type,
9928 index_expr, minval);
9929 minval = integer_zero_node;
9930 index = expand_normal (index_expr);
9932 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9933 omode, 1, default_label);
9934 /* Now we can safely truncate. */
9935 index = convert_to_mode (index_mode, index, 0);
9939 if (TYPE_MODE (index_type) != index_mode)
9941 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9942 index_expr = fold_convert (index_type, index_expr);
9945 index = expand_normal (index_expr);
9948 do_pending_stack_adjust ();
9950 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9951 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9953 index = copy_to_mode_reg (op_mode, index);
9955 op1 = expand_normal (minval);
9957 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9958 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9959 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9960 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9962 op1 = copy_to_mode_reg (op_mode, op1);
9964 op2 = expand_normal (range);
9966 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9967 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9968 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9969 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9971 op2 = copy_to_mode_reg (op_mode, op2);
9973 emit_jump_insn (gen_casesi (index, op1, op2,
9974 table_label, !default_label
9975 ? fallback_label : default_label));
9979 /* Attempt to generate a tablejump instruction; same concept. */
9980 #ifndef HAVE_tablejump
9981 #define HAVE_tablejump 0
9982 #define gen_tablejump(x, y) (0)
9985 /* Subroutine of the next function.
9987 INDEX is the value being switched on, with the lowest value
9988 in the table already subtracted.
9989 MODE is its expected mode (needed if INDEX is constant).
9990 RANGE is the length of the jump table.
9991 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9993 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9994 index value is out of range. */
9997 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10002 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10003 cfun->cfg->max_jumptable_ents = INTVAL (range);
10005 /* Do an unsigned comparison (in the proper mode) between the index
10006 expression and the value which represents the length of the range.
10007 Since we just finished subtracting the lower bound of the range
10008 from the index expression, this comparison allows us to simultaneously
10009 check that the original index expression value is both greater than
10010 or equal to the minimum value of the range and less than or equal to
10011 the maximum value of the range. */
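/* For example, with case values 5..12 the caller has already subtracted
5 and RANGE is 7. An original value of 3 wraps to the unsigned index
0xfffffffe (on a 32-bit target) and fails the single GTU test against
7, just as 13 does (13 - 5 == 8), while 5..12 map to 0..7 and pass. */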
10014 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10017 /* If index is in range, it must fit in Pmode.
10018 Convert to Pmode so we can index with it. */
10020 index = convert_to_mode (Pmode, index, 1);
10022 /* Don't let a MEM slip through, because then INDEX that comes
10023 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10024 and break_out_memory_refs will go to work on it and mess it up. */
10025 #ifdef PIC_CASE_VECTOR_ADDRESS
10026 if (flag_pic && !REG_P (index))
10027 index = copy_to_mode_reg (Pmode, index);
10030 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10031 GET_MODE_SIZE, because this indicates how large insns are. The other
10032 uses should all be Pmode, because they are addresses. This code
10033 could fail if addresses and insns are not the same size. */
10034 index = gen_rtx_PLUS (Pmode,
10035 gen_rtx_MULT (Pmode, index,
10036 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10037 gen_rtx_LABEL_REF (Pmode, table_label));
10038 #ifdef PIC_CASE_VECTOR_ADDRESS
10040 index = PIC_CASE_VECTOR_ADDRESS (index);
10043 index = memory_address (CASE_VECTOR_MODE, index);
10044 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10045 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10046 convert_move (temp, vector, 0);
10048 emit_jump_insn (gen_tablejump (temp, table_label));
10050 /* If we are generating PIC code or if the table is PC-relative, the
10051 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10052 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10057 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10058 rtx table_label, rtx default_label)
10062 if (! HAVE_tablejump)
10065 index_expr = fold_build2 (MINUS_EXPR, index_type,
10066 fold_convert (index_type, index_expr),
10067 fold_convert (index_type, minval));
10068 index = expand_normal (index_expr);
10069 do_pending_stack_adjust ();
10071 do_tablejump (index, TYPE_MODE (index_type),
10072 convert_modes (TYPE_MODE (index_type),
10073 TYPE_MODE (TREE_TYPE (range)),
10074 expand_normal (range),
10075 TYPE_UNSIGNED (TREE_TYPE (range))),
10076 table_label, default_label);
10080 /* Nonzero if the mode is a valid vector mode for this architecture.
10081 This returns nonzero even if there is no hardware support for the
10082 vector mode, but we can emulate with narrower modes. */
10085 vector_mode_valid_p (enum machine_mode mode)
10087 enum mode_class mclass = GET_MODE_CLASS (mode);
10088 enum machine_mode innermode;
10090 /* Doh! What's going on? */
10091 if (mclass != MODE_VECTOR_INT
10092 && mclass != MODE_VECTOR_FLOAT
10093 && mclass != MODE_VECTOR_FRACT
10094 && mclass != MODE_VECTOR_UFRACT
10095 && mclass != MODE_VECTOR_ACCUM
10096 && mclass != MODE_VECTOR_UACCUM)
10099 /* Hardware support. Woo hoo! */
10100 if (targetm.vector_mode_supported_p (mode))
10103 innermode = GET_MODE_INNER (mode);
10105 /* We should probably return 1 if requesting V4DI when we have no DI
10106 but do have V2DI; that case is probably very unlikely, though. */
10108 /* If we have support for the inner mode, we can safely emulate it.
10109 We may not have V2DI, but we can emulate with a pair of DIs. */
10110 return targetm.scalar_mode_supported_p (innermode);
10113 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10115 const_vector_from_tree (tree exp)
10120 enum machine_mode inner, mode;
10122 mode = TYPE_MODE (TREE_TYPE (exp));
10124 if (initializer_zerop (exp))
10125 return CONST0_RTX (mode);
10127 units = GET_MODE_NUNITS (mode);
10128 inner = GET_MODE_INNER (mode);
10130 v = rtvec_alloc (units);
10132 link = TREE_VECTOR_CST_ELTS (exp);
10133 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10135 elt = TREE_VALUE (link);
10137 if (TREE_CODE (elt) == REAL_CST)
10138 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10140 else if (TREE_CODE (elt) == FIXED_CST)
10141 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10144 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10145 TREE_INT_CST_HIGH (elt),
10149 /* Initialize remaining elements to 0. */
10150 for (; i < units; ++i)
10151 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10153 return gen_rtx_CONST_VECTOR (mode, v);
10155 #include "gt-expr.h"