/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
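
/* For instance, with MOVE_MAX_PIECES == 8 and word-aligned operands on a
   64-bit target, a 16-byte copy costs two DImode moves, so it is done by
   pieces whenever MOVE_RATIO exceeds 2.  (Illustrative numbers only; the
   actual values are target-defined.)  */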
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
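
/* On strict-alignment targets this default makes every unaligned access
   count as slow, so the by-pieces code below sticks to modes no wider
   than the known alignment.  */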
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
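
/* For example, if TO is a DImode register and FROM an SImode register,
   convert_move (to, from, 1) emits whatever insn sequence the target
   provides for a zero-extension from SImode to DImode.  */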
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
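
/* For example, convert_modes (QImode, SImode, x, 1) may simply return a
   QImode lowpart of X when truncation is a no-op, and otherwise copies
   the converted value into a fresh pseudo.  */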
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
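
/* For example, on a host with 64-bit HOST_WIDE_INT and a target with
   MOVE_MAX_PIECES of 8, STORE_MAX_PIECES is MIN (8, 16), i.e. 8.  */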
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
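
/* For example, L == 15 with MOVE_MAX_PIECES == 8 and sufficiently aligned
   operands counts one DImode, one SImode, one HImode and one QImode move,
   i.e. 4 insns.  */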
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
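
/* A typical call, e.g. when expanding a structure assignment, is
   emit_block_move (target, source, GEN_INT (size), BLOCK_OP_NORMAL);
   see the emit_block_move wrapper below.  */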
rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align
						 / BITS_PER_UNIT),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */
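
/* The emitted RTL behaves like:

	iter = 0;
	goto cmp;
      top:
	x[iter] = y[iter];
	iter++;
      cmp:
	if (iter < size) goto top;  */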
static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
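
/* Schematically, a group such as

     (parallel [(expr_list (reg:DI 100) (const_int 0))
		(expr_list (reg:DI 101) (const_int 8))])

   is cloned into the same PARALLEL shape with fresh pseudo registers of
   the same modes in place of regs 100 and 101.  */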
rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;
  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }
  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }
  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode),
					    0);
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode),
					    0);
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}
2071 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2073 /* store_bit_field always takes its value from the lsb.
2074 Move the fragment to the lsb if it's not already there. */
2076 #ifdef BLOCK_REG_PADDING
2077 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2078 == (BYTES_BIG_ENDIAN ? upward : downward)
2084 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2085 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2086 build_int_cst (NULL_TREE, shift),
2089 bytelen = adj_bytelen;
2092 /* Optimize the access just a bit. */
2094 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2095 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2096 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2097 && bytelen == GET_MODE_SIZE (mode))
2098 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2100 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2104 /* Copy from the pseudo into the (probable) hard reg. */
2105 if (orig_dst != dst)
2106 emit_move_insn (orig_dst, dst);
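/* Illustrative sketch (not part of the original source): building the
kind of PARALLEL this function consumes. A 16-byte value living in two
hypothetical DImode hard registers 0 and 1 is scattered into a stack
temporary; the CONST_INTs give each piece's byte offset, and NULL_TREE
stands in for the value's tree type. */
#if 0
{
  rtx dst = assign_stack_temp (BLKmode, 16, 0);
  rtx src = gen_rtx_PARALLEL
    (VOIDmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 0),
				   GEN_INT (0)),
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 1),
				   GEN_INT (8))));
  emit_group_store (dst, src, NULL_TREE, 16);
}
#endif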
2109 /* Generate code to copy a BLKmode object of TYPE out of a
2110 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2111 is null, a stack temporary is created. TGTBLK is returned.
2113 The purpose of this routine is to handle functions that return
2114 BLKmode structures in registers. Some machines (the PA for example)
2115 want to return all small structures in registers regardless of the
2116 structure's alignment. */
2119 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2121 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2122 rtx src = NULL, dst = NULL;
2123 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2124 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2125 enum machine_mode copy_mode;
2129 tgtblk = assign_temp (build_qualified_type (type,
2131 | TYPE_QUAL_CONST)),
2133 preserve_temp_slots (tgtblk);
2136 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2137 into a new pseudo which is a full word. */
2139 if (GET_MODE (srcreg) != BLKmode
2140 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2141 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2143 /* If the structure doesn't take up a whole number of words, see whether
2144 SRCREG is padded on the left or on the right. If it's on the left,
2145 set PADDING_CORRECTION to the number of bits to skip.
In most ABIs, the structure will be returned at the least significant end of
2148 the register, which translates to right padding on little-endian
2149 targets and left padding on big-endian targets. The opposite
2150 holds if the structure is returned at the most significant
2151 end of the register. */
2152 if (bytes % UNITS_PER_WORD != 0
2153 && (targetm.calls.return_in_msb (type)
2155 : BYTES_BIG_ENDIAN))
2157 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
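/* Worked example (added for illustration): with BITS_PER_WORD == 32 and
a 6-byte structure, bytes % UNITS_PER_WORD == 2, so the correction is
32 - 2 * 8 == 16 bits to skip. */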
2159 /* Copy the structure BITSIZE bits at a time. If the target lives in
2160 memory, take care of not reading/writing past its end by selecting
a copy mode suited to BITSIZE. This should always be possible given
that it is MIN (TYPE_ALIGN (type), BITS_PER_WORD).

We could probably emit more efficient code for machines which do not use
strict alignment, but it doesn't seem worth the effort at the current
time. */
2168 copy_mode = word_mode;
2171 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2172 if (mem_mode != BLKmode)
2173 copy_mode = mem_mode;
2176 for (bitpos = 0, xbitpos = padding_correction;
2177 bitpos < bytes * BITS_PER_UNIT;
2178 bitpos += bitsize, xbitpos += bitsize)
2180 /* We need a new source operand each time xbitpos is on a
2181 word boundary and when xbitpos == padding_correction
2182 (the first time through). */
2183 if (xbitpos % BITS_PER_WORD == 0
2184 || xbitpos == padding_correction)
2185 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
/* We need a new destination operand each time bitpos is on
a word boundary. */
2190 if (bitpos % BITS_PER_WORD == 0)
2191 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2193 /* Use xbitpos for the source extraction (right justified) and
2194 bitpos for the destination store (left justified). */
2195 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2196 extract_bit_field (src, bitsize,
2197 xbitpos % BITS_PER_WORD, 1,
2198 NULL_RTX, copy_mode, copy_mode));
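/* Illustrative sketch (not part of the original source): how a call
expander might use this routine; RETREG and RETTYPE are hypothetical
stand-ins for the return register and the function's return type. */
#if 0
  rtx blk = copy_blkmode_from_reg (NULL_RTX, retreg, rettype);
#endif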
2204 /* Add a USE expression for REG to the (possibly empty) list pointed
2205 to by CALL_FUSAGE. REG must denote a hard register. */
2208 use_reg (rtx *call_fusage, rtx reg)
2210 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2213 = gen_rtx_EXPR_LIST (VOIDmode,
2214 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2217 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2218 starting at REGNO. All of these registers must be hard registers. */
2221 use_regs (rtx *call_fusage, int regno, int nregs)
2225 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2227 for (i = 0; i < nregs; i++)
2228 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2231 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2232 PARALLEL REGS. This is for calls that pass values in multiple
2233 non-contiguous locations. The Irix 6 ABI has examples of this. */
2236 use_group_regs (rtx *call_fusage, rtx regs)
2240 for (i = 0; i < XVECLEN (regs, 0); i++)
2242 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2244 /* A NULL entry means the parameter goes both on the stack and in
2245 registers. This can also be a MEM for targets that pass values
2246 partially on the stack and partially in registers. */
2247 if (reg != 0 && REG_P (reg))
2248 use_reg (call_fusage, reg);
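/* Illustrative sketch (not part of the original source): a caller
recording that an argument is passed in SImode hard register 0; the
register number is a made-up example. */
#if 0
{
  rtx call_fusage = NULL_RTX;
  use_reg (&call_fusage, gen_rtx_REG (SImode, 0));
  /* CALL_FUSAGE is later attached to the CALL_INSN by the caller.  */
}
#endif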
2253 /* Determine whether the LEN bytes generated by CONSTFUN can be
2254 stored to memory using several move instructions. CONSTFUNDATA is
2255 a pointer which will be passed as argument in every CONSTFUN call.
2256 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2257 a memset operation and false if it's a copy of a constant string.
2258 Return nonzero if a call to store_by_pieces should succeed. */
2261 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2262 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2263 void *constfundata, unsigned int align, bool memsetp)
2265 unsigned HOST_WIDE_INT l;
2266 unsigned int max_size;
2267 HOST_WIDE_INT offset = 0;
2268 enum machine_mode mode, tmode;
2269 enum insn_code icode;
2277 ? SET_BY_PIECES_P (len, align)
2278 : STORE_BY_PIECES_P (len, align)))
2281 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2282 if (align >= GET_MODE_ALIGNMENT (tmode))
2283 align = GET_MODE_ALIGNMENT (tmode);
2286 enum machine_mode xmode;
2288 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2290 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2291 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2292 || SLOW_UNALIGNED_ACCESS (tmode, align))
2295 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2298 /* We would first store what we can in the largest integer mode, then go to
2299 successively smaller modes. */
2302 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2307 max_size = STORE_MAX_PIECES + 1;
2308 while (max_size > 1)
2310 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2311 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2312 if (GET_MODE_SIZE (tmode) < max_size)
2315 if (mode == VOIDmode)
2318 icode = optab_handler (mov_optab, mode)->insn_code;
2319 if (icode != CODE_FOR_nothing
2320 && align >= GET_MODE_ALIGNMENT (mode))
2322 unsigned int size = GET_MODE_SIZE (mode);
2329 cst = (*constfun) (constfundata, offset, mode);
2330 if (!LEGITIMATE_CONSTANT_P (cst))
2340 max_size = GET_MODE_SIZE (mode);
2343 /* The code above should have handled everything. */
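/* Illustrative sketch (not part of the original source): a minimal
CONSTFUN describing an all-zero block, in the style of clear_by_pieces_1
below. The length (32 bytes) and bit alignment (32) passed to the query
are made-up example values. */
#if 0
static rtx
zeros_constfun (void *data ATTRIBUTE_UNUSED,
		HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

/* ... and in some expander, with TO a BLKmode MEM:
     if (can_store_by_pieces (32, zeros_constfun, NULL, 32, true))
       store_by_pieces (to, 32, zeros_constfun, NULL, 32, true, 0);  */
#endif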
2350 /* Generate several move instructions to store LEN bytes generated by
2351 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2352 pointer which will be passed as argument in every CONSTFUN call.
2353 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2354 a memset operation and false if it's a copy of a constant string.
If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
stpcpy. */
2360 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2361 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2362 void *constfundata, unsigned int align, bool memsetp, int endp)
2364 struct store_by_pieces data;
2368 gcc_assert (endp != 2);
2373 ? SET_BY_PIECES_P (len, align)
2374 : STORE_BY_PIECES_P (len, align));
2375 data.constfun = constfun;
2376 data.constfundata = constfundata;
2379 store_by_pieces_1 (&data, align);
2384 gcc_assert (!data.reverse);
2389 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2390 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2392 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2395 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2402 to1 = adjust_address (data.to, QImode, data.offset);
2410 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2411 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2414 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2416 struct store_by_pieces data;
2421 data.constfun = clear_by_pieces_1;
2422 data.constfundata = NULL;
2425 store_by_pieces_1 (&data, align);
2428 /* Callback routine for clear_by_pieces.
2429 Return const0_rtx unconditionally. */
2432 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2433 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2434 enum machine_mode mode ATTRIBUTE_UNUSED)
2439 /* Subroutine of clear_by_pieces and store_by_pieces.
2440 Generate several move instructions to store LEN bytes of block TO. (A MEM
2441 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2444 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2445 unsigned int align ATTRIBUTE_UNUSED)
2447 rtx to_addr = XEXP (data->to, 0);
2448 unsigned int max_size = STORE_MAX_PIECES + 1;
2449 enum machine_mode mode = VOIDmode, tmode;
2450 enum insn_code icode;
2453 data->to_addr = to_addr;
data->autinc_to
= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
|| GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
data->explicit_inc_to = 0;
data->reverse
= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
if (data->reverse)
2462 data->offset = data->len;
2464 /* If storing requires more than two move insns,
2465 copy addresses to registers (to make displacements shorter)
2466 and use post-increment if available. */
2467 if (!data->autinc_to
2468 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2470 /* Determine the main mode we'll be using. */
2471 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2472 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2473 if (GET_MODE_SIZE (tmode) < max_size)
2476 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2478 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2479 data->autinc_to = 1;
2480 data->explicit_inc_to = -1;
2483 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2484 && ! data->autinc_to)
2486 data->to_addr = copy_addr_to_reg (to_addr);
2487 data->autinc_to = 1;
2488 data->explicit_inc_to = 1;
if (!data->autinc_to && CONSTANT_P (to_addr))
2492 data->to_addr = copy_addr_to_reg (to_addr);
2495 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2496 if (align >= GET_MODE_ALIGNMENT (tmode))
2497 align = GET_MODE_ALIGNMENT (tmode);
2500 enum machine_mode xmode;
2502 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2504 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2505 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2506 || SLOW_UNALIGNED_ACCESS (tmode, align))
2509 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2512 /* First store what we can in the largest integer mode, then go to
2513 successively smaller modes. */
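/* Worked example (added for illustration, assuming word_mode == SImode,
adequate alignment and available move patterns): for data->len == 7 the
loop below issues one SImode store, then one HImode store, then one
QImode store. */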
2515 while (max_size > 1)
2517 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2518 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2519 if (GET_MODE_SIZE (tmode) < max_size)
2522 if (mode == VOIDmode)
2525 icode = optab_handler (mov_optab, mode)->insn_code;
2526 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2527 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2529 max_size = GET_MODE_SIZE (mode);
2532 /* The code above should have handled everything. */
2533 gcc_assert (!data->len);
2536 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2537 with move instructions for mode MODE. GENFUN is the gen_... function
2538 to make a move insn for that mode. DATA has all the other info. */
2541 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2542 struct store_by_pieces *data)
2544 unsigned int size = GET_MODE_SIZE (mode);
2547 while (data->len >= size)
2550 data->offset -= size;
2552 if (data->autinc_to)
2553 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2556 to1 = adjust_address (data->to, mode, data->offset);
2558 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2559 emit_insn (gen_add2_insn (data->to_addr,
2560 GEN_INT (-(HOST_WIDE_INT) size)));
2562 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2563 emit_insn ((*genfun) (to1, cst));
2565 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2566 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2568 if (! data->reverse)
2569 data->offset += size;
2575 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2576 its length in bytes. */
2579 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2580 unsigned int expected_align, HOST_WIDE_INT expected_size)
2582 enum machine_mode mode = GET_MODE (object);
2585 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2587 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2588 just move a zero. Otherwise, do this a piece at a time. */
2590 && GET_CODE (size) == CONST_INT
2591 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2593 rtx zero = CONST0_RTX (mode);
2596 emit_move_insn (object, zero);
2600 if (COMPLEX_MODE_P (mode))
2602 zero = CONST0_RTX (GET_MODE_INNER (mode));
2605 write_complex_part (object, zero, 0);
2606 write_complex_part (object, zero, 1);
2612 if (size == const0_rtx)
2615 align = MEM_ALIGN (object);
2617 if (GET_CODE (size) == CONST_INT
2618 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2619 clear_by_pieces (object, INTVAL (size), align);
2620 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2621 expected_align, expected_size))
2624 return set_storage_via_libcall (object, size, const0_rtx,
2625 method == BLOCK_OP_TAILCALL);
2631 clear_storage (rtx object, rtx size, enum block_op_methods method)
2633 return clear_storage_hints (object, size, method, 0, -1);
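/* Illustrative sketch (not part of the original source): zeroing a
64-byte BLKmode stack temporary; 64 is a made-up example size. */
#if 0
{
  rtx mem = assign_stack_temp (BLKmode, 64, 0);
  clear_storage (mem, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif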
2637 /* A subroutine of clear_storage. Expand a call to memset.
2638 Return the return value of memset, 0 otherwise. */
2641 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2643 tree call_expr, fn, object_tree, size_tree, val_tree;
2644 enum machine_mode size_mode;
/* Emit code to copy OBJECT and SIZE into new pseudos. We can then
place those new pseudos into a VAR_DECL and use them later. */
2650 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2652 size_mode = TYPE_MODE (sizetype);
2653 size = convert_to_mode (size_mode, size, 1);
2654 size = copy_to_mode_reg (size_mode, size);
2656 /* It is incorrect to use the libcall calling conventions to call
2657 memset in this context. This could be a user call to memset and
2658 the user may wish to examine the return value from memset. For
2659 targets where libcalls and normal calls have different conventions
2660 for returning pointers, we could end up generating incorrect code. */
2662 object_tree = make_tree (ptr_type_node, object);
2663 if (GET_CODE (val) != CONST_INT)
2664 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2665 size_tree = make_tree (sizetype, size);
2666 val_tree = make_tree (integer_type_node, val);
2668 fn = clear_storage_libcall_fn (true);
call_expr = build_call_expr (fn, 3,
object_tree, val_tree, size_tree);
2671 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2673 retval = expand_normal (call_expr);
2678 /* A subroutine of set_storage_via_libcall. Create the tree node
2679 for the function we use for block clears. The first time FOR_CALL
2680 is true, we call assemble_external. */
2682 tree block_clear_fn;
2685 init_block_clear_fn (const char *asmspec)
2687 if (!block_clear_fn)
2691 fn = get_identifier ("memset");
2692 args = build_function_type_list (ptr_type_node, ptr_type_node,
2693 integer_type_node, sizetype,
2696 fn = build_decl (FUNCTION_DECL, fn, args);
2697 DECL_EXTERNAL (fn) = 1;
2698 TREE_PUBLIC (fn) = 1;
2699 DECL_ARTIFICIAL (fn) = 1;
2700 TREE_NOTHROW (fn) = 1;
2701 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2702 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2704 block_clear_fn = fn;
2708 set_user_assembler_name (block_clear_fn, asmspec);
2712 clear_storage_libcall_fn (int for_call)
2714 static bool emitted_extern;
2716 if (!block_clear_fn)
2717 init_block_clear_fn (NULL);
2719 if (for_call && !emitted_extern)
2721 emitted_extern = true;
2722 make_decl_rtl (block_clear_fn);
2723 assemble_external (block_clear_fn);
2726 return block_clear_fn;
2729 /* Expand a setmem pattern; return true if successful. */
2732 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2733 unsigned int expected_align, HOST_WIDE_INT expected_size)
2735 /* Try the most limited insn first, because there's no point
2736 including more than one in the machine description unless
2737 the more limited one has some advantage. */
2739 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2740 enum machine_mode mode;
2742 if (expected_align < align)
2743 expected_align = align;
2745 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2746 mode = GET_MODE_WIDER_MODE (mode))
2748 enum insn_code code = setmem_optab[(int) mode];
2749 insn_operand_predicate_fn pred;
2751 if (code != CODE_FOR_nothing
2752 /* We don't need MODE to be narrower than
2753 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2754 the mode mask, as it is returned by the macro, it will
2755 definitely be less than the actual mode mask. */
2756 && ((GET_CODE (size) == CONST_INT
2757 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2758 <= (GET_MODE_MASK (mode) >> 1)))
2759 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2760 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2761 || (*pred) (object, BLKmode))
2762 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2763 || (*pred) (opalign, VOIDmode)))
2766 enum machine_mode char_mode;
2767 rtx last = get_last_insn ();
2770 opsize = convert_to_mode (mode, size, 1);
2771 pred = insn_data[(int) code].operand[1].predicate;
2772 if (pred != 0 && ! (*pred) (opsize, mode))
2773 opsize = copy_to_mode_reg (mode, opsize);
2776 char_mode = insn_data[(int) code].operand[2].mode;
2777 if (char_mode != VOIDmode)
2779 opchar = convert_to_mode (char_mode, opchar, 1);
2780 pred = insn_data[(int) code].operand[2].predicate;
2781 if (pred != 0 && ! (*pred) (opchar, char_mode))
2782 opchar = copy_to_mode_reg (char_mode, opchar);
2785 if (insn_data[(int) code].n_operands == 4)
2786 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2788 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2789 GEN_INT (expected_align
2791 GEN_INT (expected_size));
2798 delete_insns_since (last);
2806 /* Write to one of the components of the complex value CPLX. Write VAL to
the real part if IMAG_P is false, and the imaginary part if it's true. */
2810 write_complex_part (rtx cplx, rtx val, bool imag_p)
2812 enum machine_mode cmode;
2813 enum machine_mode imode;
2816 if (GET_CODE (cplx) == CONCAT)
2818 emit_move_insn (XEXP (cplx, imag_p), val);
2822 cmode = GET_MODE (cplx);
2823 imode = GET_MODE_INNER (cmode);
2824 ibitsize = GET_MODE_BITSIZE (imode);
2826 /* For MEMs simplify_gen_subreg may generate an invalid new address
2827 because, e.g., the original address is considered mode-dependent
2828 by the target, which restricts simplify_subreg from invoking
2829 adjust_address_nv. Instead of preparing fallback support for an
2830 invalid address, we call adjust_address_nv directly. */
2833 emit_move_insn (adjust_address_nv (cplx, imode,
2834 imag_p ? GET_MODE_SIZE (imode) : 0),
2839 /* If the sub-object is at least word sized, then we know that subregging
2840 will work. This special case is important, since store_bit_field
2841 wants to operate on integer modes, and there's rarely an OImode to
2842 correspond to TCmode. */
2843 if (ibitsize >= BITS_PER_WORD
2844 /* For hard regs we have exact predicates. Assume we can split
2845 the original object if it spans an even number of hard regs.
2846 This special case is important for SCmode on 64-bit platforms
2847 where the natural size of floating-point regs is 32-bit. */
2849 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2850 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2852 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2853 imag_p ? GET_MODE_SIZE (imode) : 0);
2856 emit_move_insn (part, val);
2860 /* simplify_gen_subreg may fail for sub-word MEMs. */
2861 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2864 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2867 /* Extract one of the components of the complex value CPLX. Extract the
2868 real part if IMAG_P is false, and the imaginary part if it's true. */
2871 read_complex_part (rtx cplx, bool imag_p)
2873 enum machine_mode cmode, imode;
2876 if (GET_CODE (cplx) == CONCAT)
2877 return XEXP (cplx, imag_p);
2879 cmode = GET_MODE (cplx);
2880 imode = GET_MODE_INNER (cmode);
2881 ibitsize = GET_MODE_BITSIZE (imode);
2883 /* Special case reads from complex constants that got spilled to memory. */
2884 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2886 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2887 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2889 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2890 if (CONSTANT_CLASS_P (part))
2891 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2895 /* For MEMs simplify_gen_subreg may generate an invalid new address
2896 because, e.g., the original address is considered mode-dependent
2897 by the target, which restricts simplify_subreg from invoking
2898 adjust_address_nv. Instead of preparing fallback support for an
2899 invalid address, we call adjust_address_nv directly. */
2901 return adjust_address_nv (cplx, imode,
2902 imag_p ? GET_MODE_SIZE (imode) : 0);
2904 /* If the sub-object is at least word sized, then we know that subregging
2905 will work. This special case is important, since extract_bit_field
2906 wants to operate on integer modes, and there's rarely an OImode to
2907 correspond to TCmode. */
2908 if (ibitsize >= BITS_PER_WORD
2909 /* For hard regs we have exact predicates. Assume we can split
2910 the original object if it spans an even number of hard regs.
2911 This special case is important for SCmode on 64-bit platforms
2912 where the natural size of floating-point regs is 32-bit. */
2914 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2915 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2917 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2918 imag_p ? GET_MODE_SIZE (imode) : 0);
2922 /* simplify_gen_subreg may fail for sub-word MEMs. */
2923 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2926 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2927 true, NULL_RTX, imode, imode);
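/* Illustrative sketch (not part of the original source): using the two
helpers above to swap the parts of a hypothetical SCmode value SRC into
a fresh pseudo. */
#if 0
{
  rtx tmp = gen_reg_rtx (SCmode);
  write_complex_part (tmp, read_complex_part (src, true), false);
  write_complex_part (tmp, read_complex_part (src, false), true);
}
#endif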
2930 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2931 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2932 represented in NEW_MODE. If FORCE is true, this will never happen, as
2933 we'll force-create a SUBREG if needed. */
2936 emit_move_change_mode (enum machine_mode new_mode,
2937 enum machine_mode old_mode, rtx x, bool force)
2941 if (push_operand (x, GET_MODE (x)))
2943 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2944 MEM_COPY_ATTRIBUTES (ret, x);
2948 /* We don't have to worry about changing the address since the
2949 size in bytes is supposed to be the same. */
2950 if (reload_in_progress)
2952 /* Copy the MEM to change the mode and move any
2953 substitutions from the old MEM to the new one. */
2954 ret = adjust_address_nv (x, new_mode, 0);
2955 copy_replacements (x, ret);
2958 ret = adjust_address (x, new_mode, 0);
2962 /* Note that we do want simplify_subreg's behavior of validating
2963 that the new mode is ok for a hard register. If we were to use
2964 simplify_gen_subreg, we would create the subreg, but would
2965 probably run into the target not being able to implement it. */
2966 /* Except, of course, when FORCE is true, when this is exactly what
2967 we want. Which is needed for CCmodes on some targets. */
2969 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2971 ret = simplify_subreg (new_mode, x, old_mode, 0);
2977 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2978 an integer mode of the same size as MODE. Returns the instruction
2979 emitted, or NULL if such a move could not be generated. */
2982 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2984 enum machine_mode imode;
2985 enum insn_code code;
2987 /* There must exist a mode of the exact size we require. */
2988 imode = int_mode_for_mode (mode);
2989 if (imode == BLKmode)
2992 /* The target must support moves in this mode. */
2993 code = optab_handler (mov_optab, imode)->insn_code;
2994 if (code == CODE_FOR_nothing)
2997 x = emit_move_change_mode (imode, mode, x, force);
3000 y = emit_move_change_mode (imode, mode, y, force);
3003 return emit_insn (GEN_FCN (code) (x, y));
3006 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3007 Return an equivalent MEM that does not use an auto-increment. */
3010 emit_move_resolve_push (enum machine_mode mode, rtx x)
3012 enum rtx_code code = GET_CODE (XEXP (x, 0));
3013 HOST_WIDE_INT adjust;
3016 adjust = GET_MODE_SIZE (mode);
3017 #ifdef PUSH_ROUNDING
adjust = PUSH_ROUNDING (adjust);
#endif
3020 if (code == PRE_DEC || code == POST_DEC)
3022 else if (code == PRE_MODIFY || code == POST_MODIFY)
3024 rtx expr = XEXP (XEXP (x, 0), 1);
3027 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3028 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3029 val = INTVAL (XEXP (expr, 1));
3030 if (GET_CODE (expr) == MINUS)
3032 gcc_assert (adjust == val || adjust == -val);
3036 /* Do not use anti_adjust_stack, since we don't want to update
3037 stack_pointer_delta. */
3038 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3039 GEN_INT (adjust), stack_pointer_rtx,
3040 0, OPTAB_LIB_WIDEN);
3041 if (temp != stack_pointer_rtx)
3042 emit_move_insn (stack_pointer_rtx, temp);
3049 temp = stack_pointer_rtx;
3054 temp = plus_constant (stack_pointer_rtx, -adjust);
3060 return replace_equiv_address (x, temp);
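/* Worked example (added for illustration, assuming PUSH_ROUNDING leaves
a 4-byte size unchanged): for (mem:SI (pre_dec (reg sp))) the code above
emits an explicit "sp -= 4" and returns the equivalent plain
(mem:SI (reg sp)) with no embedded side effect. */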
3063 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3064 X is known to satisfy push_operand, and MODE is known to be complex.
3065 Returns the last instruction emitted. */
3068 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3070 enum machine_mode submode = GET_MODE_INNER (mode);
3073 #ifdef PUSH_ROUNDING
3074 unsigned int submodesize = GET_MODE_SIZE (submode);
3076 /* In case we output to the stack, but the size is smaller than the
3077 machine can push exactly, we need to use move instructions. */
3078 if (PUSH_ROUNDING (submodesize) != submodesize)
3080 x = emit_move_resolve_push (mode, x);
3081 return emit_move_insn (x, y);
3085 /* Note that the real part always precedes the imag part in memory
3086 regardless of machine's endianness. */
3087 switch (GET_CODE (XEXP (x, 0)))
3101 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3102 read_complex_part (y, imag_first));
3103 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3104 read_complex_part (y, !imag_first));
3107 /* A subroutine of emit_move_complex. Perform the move from Y to X
3108 via two moves of the parts. Returns the last instruction emitted. */
3111 emit_move_complex_parts (rtx x, rtx y)
3113 /* Show the output dies here. This is necessary for SUBREGs
3114 of pseudos since we cannot track their lifetimes correctly;
3115 hard regs shouldn't appear here except as return values. */
3116 if (!reload_completed && !reload_in_progress
3117 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3120 write_complex_part (x, read_complex_part (y, false), false);
3121 write_complex_part (x, read_complex_part (y, true), true);
3123 return get_last_insn ();
3126 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3127 MODE is known to be complex. Returns the last instruction emitted. */
3130 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3134 /* Need to take special care for pushes, to maintain proper ordering
3135 of the data, and possibly extra padding. */
3136 if (push_operand (x, mode))
3137 return emit_move_complex_push (mode, x, y);
3139 /* See if we can coerce the target into moving both values at once. */
3141 /* Move floating point as parts. */
3142 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3143 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3145 /* Not possible if the values are inherently not adjacent. */
3146 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3148 /* Is possible if both are registers (or subregs of registers). */
3149 else if (register_operand (x, mode) && register_operand (y, mode))
3151 /* If one of the operands is a memory, and alignment constraints
3152 are friendly enough, we may be able to do combined memory operations.
3153 We do not attempt this if Y is a constant because that combination is
3154 usually better with the by-parts thing below. */
3155 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3156 && (!STRICT_ALIGNMENT
3157 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3166 /* For memory to memory moves, optimal behavior can be had with the
3167 existing block move logic. */
3168 if (MEM_P (x) && MEM_P (y))
3170 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3171 BLOCK_OP_NO_LIBCALL);
3172 return get_last_insn ();
3175 ret = emit_move_via_integer (mode, x, y, true);
3180 return emit_move_complex_parts (x, y);
3183 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3184 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3187 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3191 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3194 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3195 if (code != CODE_FOR_nothing)
3197 x = emit_move_change_mode (CCmode, mode, x, true);
3198 y = emit_move_change_mode (CCmode, mode, y, true);
3199 return emit_insn (GEN_FCN (code) (x, y));
3203 /* Otherwise, find the MODE_INT mode of the same width. */
3204 ret = emit_move_via_integer (mode, x, y, false);
3205 gcc_assert (ret != NULL);
3209 /* Return true if word I of OP lies entirely in the
3210 undefined bits of a paradoxical subreg. */
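/* Worked example (added for illustration): on a 32-bit little-endian
target, word 1 of the paradoxical (subreg:DI (reg:SI) 0) lies wholly in
the undefined upper half, so this predicate returns true for I == 1 and
false for I == 0. */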
3213 undefined_operand_subword_p (const_rtx op, int i)
3215 enum machine_mode innermode, innermostmode;
3217 if (GET_CODE (op) != SUBREG)
3219 innermode = GET_MODE (op);
3220 innermostmode = GET_MODE (SUBREG_REG (op));
3221 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3222 /* The SUBREG_BYTE represents offset, as if the value were stored in
3223 memory, except for a paradoxical subreg where we define
SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3226 if (SUBREG_BYTE (op) == 0
3227 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3229 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3230 if (WORDS_BIG_ENDIAN)
3231 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3232 if (BYTES_BIG_ENDIAN)
3233 offset += difference % UNITS_PER_WORD;
3235 if (offset >= GET_MODE_SIZE (innermostmode)
3236 || offset <= -GET_MODE_SIZE (word_mode))
3241 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3242 MODE is any multi-word or full-word mode that lacks a move_insn
3243 pattern. Note that you will get better code if you define such
3244 patterns, even if they must turn into multiple assembler instructions. */
3247 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3254 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3256 /* If X is a push on the stack, do the push now and replace
3257 X with a reference to the stack pointer. */
3258 if (push_operand (x, mode))
3259 x = emit_move_resolve_push (mode, x);
3261 /* If we are in reload, see if either operand is a MEM whose address
3262 is scheduled for replacement. */
3263 if (reload_in_progress && MEM_P (x)
3264 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3265 x = replace_equiv_address_nv (x, inner);
3266 if (reload_in_progress && MEM_P (y)
3267 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3268 y = replace_equiv_address_nv (y, inner);
3272 need_clobber = false;
3274 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3277 rtx xpart = operand_subword (x, i, 1, mode);
3280 /* Do not generate code for a move if it would come entirely
3281 from the undefined bits of a paradoxical subreg. */
3282 if (undefined_operand_subword_p (y, i))
3285 ypart = operand_subword (y, i, 1, mode);
3287 /* If we can't get a part of Y, put Y into memory if it is a
3288 constant. Otherwise, force it into a register. Then we must
3289 be able to get a part of Y. */
3290 if (ypart == 0 && CONSTANT_P (y))
3292 y = use_anchored_address (force_const_mem (mode, y));
3293 ypart = operand_subword (y, i, 1, mode);
3295 else if (ypart == 0)
3296 ypart = operand_subword_force (y, i, mode);
3298 gcc_assert (xpart && ypart);
3300 need_clobber |= (GET_CODE (xpart) == SUBREG);
3302 last_insn = emit_move_insn (xpart, ypart);
3308 /* Show the output dies here. This is necessary for SUBREGs
3309 of pseudos since we cannot track their lifetimes correctly;
3310 hard regs shouldn't appear here except as return values.
3311 We never want to emit such a clobber after reload. */
3313 && ! (reload_in_progress || reload_completed)
3314 && need_clobber != 0)
3322 /* Low level part of emit_move_insn.
3323 Called just like emit_move_insn, but assumes X and Y
3324 are basically valid. */
3327 emit_move_insn_1 (rtx x, rtx y)
3329 enum machine_mode mode = GET_MODE (x);
3330 enum insn_code code;
3332 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3334 code = optab_handler (mov_optab, mode)->insn_code;
3335 if (code != CODE_FOR_nothing)
3336 return emit_insn (GEN_FCN (code) (x, y));
3338 /* Expand complex moves by moving real part and imag part. */
3339 if (COMPLEX_MODE_P (mode))
3340 return emit_move_complex (mode, x, y);
3342 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3343 || ALL_FIXED_POINT_MODE_P (mode))
3345 rtx result = emit_move_via_integer (mode, x, y, true);
3347 /* If we can't find an integer mode, use multi words. */
3351 return emit_move_multi_word (mode, x, y);
3354 if (GET_MODE_CLASS (mode) == MODE_CC)
3355 return emit_move_ccmode (mode, x, y);
3357 /* Try using a move pattern for the corresponding integer mode. This is
3358 only safe when simplify_subreg can convert MODE constants into integer
3359 constants. At present, it can only do this reliably if the value
3360 fits within a HOST_WIDE_INT. */
3361 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3363 rtx ret = emit_move_via_integer (mode, x, y, false);
3368 return emit_move_multi_word (mode, x, y);
3371 /* Generate code to copy Y into X.
3372 Both Y and X must have the same mode, except that
3373 Y can be a constant with VOIDmode.
3374 This mode cannot be BLKmode; use emit_block_move for that.
3376 Return the last instruction emitted. */
3379 emit_move_insn (rtx x, rtx y)
3381 enum machine_mode mode = GET_MODE (x);
3382 rtx y_cst = NULL_RTX;
3385 gcc_assert (mode != BLKmode
3386 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3391 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3392 && (last_insn = compress_float_constant (x, y)))
3397 if (!LEGITIMATE_CONSTANT_P (y))
3399 y = force_const_mem (mode, y);
3401 /* If the target's cannot_force_const_mem prevented the spill,
3402 assume that the target's move expanders will also take care
3403 of the non-legitimate constant. */
3407 y = use_anchored_address (y);
/* If X or Y are memory references, verify that their addresses are valid
for the machine. */
if (MEM_P (x)
&& (! memory_address_p (GET_MODE (x), XEXP (x, 0))
&& ! push_operand (x, GET_MODE (x))))
x = validize_mem (x);

if (MEM_P (y)
&& ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3420 y = validize_mem (y);
3422 gcc_assert (mode != BLKmode);
3424 last_insn = emit_move_insn_1 (x, y);
3426 if (y_cst && REG_P (x)
3427 && (set = single_set (last_insn)) != NULL_RTX
3428 && SET_DEST (set) == x
3429 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3430 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
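/* Illustrative sketch (not part of the original source): the typical
use of emit_move_insn when expanding, loading a constant into a fresh
pseudo. */
#if 0
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
}
#endif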
3435 /* If Y is representable exactly in a narrower mode, and the target can
3436 perform the extension directly from constant or memory, then emit the
3437 move as an extension. */
3440 compress_float_constant (rtx x, rtx y)
3442 enum machine_mode dstmode = GET_MODE (x);
3443 enum machine_mode orig_srcmode = GET_MODE (y);
3444 enum machine_mode srcmode;
3446 int oldcost, newcost;
3447 bool speed = optimize_insn_for_speed_p ();
3449 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3451 if (LEGITIMATE_CONSTANT_P (y))
3452 oldcost = rtx_cost (y, SET, speed);
3454 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3456 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3457 srcmode != orig_srcmode;
3458 srcmode = GET_MODE_WIDER_MODE (srcmode))
3461 rtx trunc_y, last_insn;
3463 /* Skip if the target can't extend this way. */
3464 ic = can_extend_p (dstmode, srcmode, 0);
3465 if (ic == CODE_FOR_nothing)
3468 /* Skip if the narrowed value isn't exact. */
3469 if (! exact_real_truncate (srcmode, &r))
3472 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3474 if (LEGITIMATE_CONSTANT_P (trunc_y))
/* Skip if the target needs extra instructions to perform
the extension. */
3478 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3480 /* This is valid, but may not be cheaper than the original. */
3481 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3482 if (oldcost < newcost)
3485 else if (float_extend_from_mem[dstmode][srcmode])
3487 trunc_y = force_const_mem (srcmode, trunc_y);
3488 /* This is valid, but may not be cheaper than the original. */
3489 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3490 if (oldcost < newcost)
3492 trunc_y = validize_mem (trunc_y);
3497 /* For CSE's benefit, force the compressed constant pool entry
3498 into a new pseudo. This constant may be used in different modes,
3499 and if not, combine will put things back together for us. */
3500 trunc_y = force_reg (srcmode, trunc_y);
3501 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3502 last_insn = get_last_insn ();
3505 set_unique_reg_note (last_insn, REG_EQUAL, y);
3513 /* Pushing data onto the stack. */
3515 /* Push a block of length SIZE (perhaps variable)
3516 and return an rtx to address the beginning of the block.
3517 The value may be virtual_outgoing_args_rtx.
3519 EXTRA is the number of bytes of padding to push in addition to SIZE.
3520 BELOW nonzero means this padding comes at low addresses;
3521 otherwise, the padding comes at high addresses. */
3524 push_block (rtx size, int extra, int below)
3528 size = convert_modes (Pmode, ptr_mode, size, 1);
3529 if (CONSTANT_P (size))
3530 anti_adjust_stack (plus_constant (size, extra));
3531 else if (REG_P (size) && extra == 0)
3532 anti_adjust_stack (size);
3535 temp = copy_to_mode_reg (Pmode, size);
3537 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3538 temp, 0, OPTAB_LIB_WIDEN);
3539 anti_adjust_stack (temp);
3542 #ifndef STACK_GROWS_DOWNWARD
3548 temp = virtual_outgoing_args_rtx;
3549 if (extra != 0 && below)
3550 temp = plus_constant (temp, extra);
3554 if (GET_CODE (size) == CONST_INT)
3555 temp = plus_constant (virtual_outgoing_args_rtx,
3556 -INTVAL (size) - (below ? 0 : extra));
3557 else if (extra != 0 && !below)
3558 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3559 negate_rtx (Pmode, plus_constant (size, extra)));
3561 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3562 negate_rtx (Pmode, size));
3565 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3568 #ifdef PUSH_ROUNDING
3570 /* Emit single push insn. */
3573 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3576 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3578 enum insn_code icode;
3579 insn_operand_predicate_fn pred;
3581 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* If there is a push pattern, use it. Otherwise try the old way of
throwing a MEM representing the push operation to the move expander. */
3584 icode = optab_handler (push_optab, mode)->insn_code;
3585 if (icode != CODE_FOR_nothing)
3587 if (((pred = insn_data[(int) icode].operand[0].predicate)
3588 && !((*pred) (x, mode))))
3589 x = force_reg (mode, x);
3590 emit_insn (GEN_FCN (icode) (x));
3593 if (GET_MODE_SIZE (mode) == rounded_size)
3594 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3595 /* If we are to pad downward, adjust the stack pointer first and
3596 then store X into the stack location using an offset. This is
because emit_move_insn does not know how to pad; it does not have
the code. */
3599 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3601 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3602 HOST_WIDE_INT offset;
3604 emit_move_insn (stack_pointer_rtx,
3605 expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
sub_optab,
#else
add_optab,
#endif
stack_pointer_rtx,
3612 GEN_INT (rounded_size),
3613 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3615 offset = (HOST_WIDE_INT) padding_size;
3616 #ifdef STACK_GROWS_DOWNWARD
3617 if (STACK_PUSH_CODE == POST_DEC)
/* We have already decremented the stack pointer, so get the
previous value. */
offset += (HOST_WIDE_INT) rounded_size;
#else
if (STACK_PUSH_CODE == POST_INC)
/* We have already incremented the stack pointer, so get the
previous value. */
offset -= (HOST_WIDE_INT) rounded_size;
#endif
3627 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3631 #ifdef STACK_GROWS_DOWNWARD
3632 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3633 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3634 GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
GEN_INT (rounded_size));
#endif
3640 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3643 dest = gen_rtx_MEM (mode, dest_addr);
3647 set_mem_attributes (dest, type, 1);
3649 if (flag_optimize_sibling_calls)
3650 /* Function incoming arguments may overlap with sibling call
3651 outgoing arguments and we cannot allow reordering of reads
3652 from function arguments with stores to outgoing arguments
3653 of sibling calls. */
3654 set_mem_alias_set (dest, 0);
emit_move_insn (dest, x);
}
#endif
3660 /* Generate code to push X onto the stack, assuming it has mode MODE and
MODE is redundant except when X is a CONST_INT (since they don't
carry mode info).
3664 SIZE is an rtx for the size of data to be copied (in bytes),
3665 needed only if X is BLKmode.
3667 ALIGN (in bits) is maximum alignment we can assume.
3669 If PARTIAL and REG are both nonzero, then copy that many of the first
3670 bytes of X into registers starting with REG, and push the rest of X.
3671 The amount of space pushed is decreased by PARTIAL bytes.
3672 REG must be a hard register in this case.
If REG is zero but PARTIAL is not, take all other actions for an
argument partially in registers, but do not actually load any
registers.
3677 EXTRA is the amount in bytes of extra space to leave next to this arg.
3678 This is ignored if an argument block has already been allocated.
3680 On a machine that lacks real push insns, ARGS_ADDR is the address of
3681 the bottom of the argument block for this call. We use indexing off there
to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
argument block has not been preallocated.
3685 ARGS_SO_FAR is the size of args previously pushed for this call.
3687 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3688 for arguments passed in registers. If nonzero, it will be the number
3689 of bytes required. */
3692 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3693 unsigned int align, int partial, rtx reg, int extra,
3694 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3698 enum direction stack_direction
3699 #ifdef STACK_GROWS_DOWNWARD
3705 /* Decide where to pad the argument: `downward' for below,
3706 `upward' for above, or `none' for don't pad it.
3707 Default is below for small data on big-endian machines; else above. */
3708 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3710 /* Invert direction if stack is post-decrement.
3712 if (STACK_PUSH_CODE == POST_DEC)
3713 if (where_pad != none)
3714 where_pad = (where_pad == downward ? upward : downward);
3719 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3721 /* Copy a block into the stack, entirely or partially. */
3728 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3729 used = partial - offset;
3731 if (mode != BLKmode)
3733 /* A value is to be stored in an insufficiently aligned
stack slot; copy via a suitably aligned slot if necessary. */
3736 size = GEN_INT (GET_MODE_SIZE (mode));
3737 if (!MEM_P (xinner))
3739 temp = assign_temp (type, 0, 1, 1);
3740 emit_move_insn (temp, xinner);
3747 /* USED is now the # of bytes we need not copy to the stack
3748 because registers will take care of them. */
3751 xinner = adjust_address (xinner, BLKmode, used);
3753 /* If the partial register-part of the arg counts in its stack size,
3754 skip the part of stack space corresponding to the registers.
3755 Otherwise, start copying to the beginning of the stack space,
3756 by setting SKIP to 0. */
3757 skip = (reg_parm_stack_space == 0) ? 0 : used;
3759 #ifdef PUSH_ROUNDING
3760 /* Do it with several push insns if that doesn't take lots of insns
3761 and if there is no difficulty with push insns that skip bytes
3762 on the stack for alignment purposes. */
3765 && GET_CODE (size) == CONST_INT
3767 && MEM_ALIGN (xinner) >= align
3768 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3769 /* Here we avoid the case of a structure whose weak alignment
3770 forces many pushes of a small amount of data,
3771 and such small pushes do rounding that causes trouble. */
3772 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3773 || align >= BIGGEST_ALIGNMENT
3774 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3775 == (align / BITS_PER_UNIT)))
3776 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3778 /* Push padding now if padding above and stack grows down,
3779 or if padding below and stack grows up.
3780 But if space already allocated, this has already been done. */
3781 if (extra && args_addr == 0
3782 && where_pad != none && where_pad != stack_direction)
3783 anti_adjust_stack (GEN_INT (extra));
3785 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3788 #endif /* PUSH_ROUNDING */
3792 /* Otherwise make space on the stack and copy the data
3793 to the address of that space. */
3795 /* Deduct words put into registers from the size we must copy. */
3798 if (GET_CODE (size) == CONST_INT)
3799 size = GEN_INT (INTVAL (size) - used);
3801 size = expand_binop (GET_MODE (size), sub_optab, size,
3802 GEN_INT (used), NULL_RTX, 0,
3806 /* Get the address of the stack space.
3807 In this case, we do not deal with EXTRA separately.
3808 A single stack adjust will do. */
3811 temp = push_block (size, extra, where_pad == downward);
3814 else if (GET_CODE (args_so_far) == CONST_INT)
3815 temp = memory_address (BLKmode,
3816 plus_constant (args_addr,
3817 skip + INTVAL (args_so_far)));
3819 temp = memory_address (BLKmode,
3820 plus_constant (gen_rtx_PLUS (Pmode,
3825 if (!ACCUMULATE_OUTGOING_ARGS)
3827 /* If the source is referenced relative to the stack pointer,
3828 copy it to another register to stabilize it. We do not need
3829 to do this if we know that we won't be changing sp. */
3831 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3832 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3833 temp = copy_to_reg (temp);
3836 target = gen_rtx_MEM (BLKmode, temp);
3838 /* We do *not* set_mem_attributes here, because incoming arguments
3839 may overlap with sibling call outgoing arguments and we cannot
3840 allow reordering of reads from function arguments with stores
3841 to outgoing arguments of sibling calls. We do, however, want
3842 to record the alignment of the stack slot. */
3843 /* ALIGN may well be better aligned than TYPE, e.g. due to
3844 PARM_BOUNDARY. Assume the caller isn't lying. */
3845 set_mem_align (target, align);
3847 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3850 else if (partial > 0)
3852 /* Scalar partly in registers. */
3854 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3857 /* # bytes of start of argument
3858 that we must make space for but need not store. */
3859 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3860 int args_offset = INTVAL (args_so_far);
3863 /* Push padding now if padding above and stack grows down,
3864 or if padding below and stack grows up.
3865 But if space already allocated, this has already been done. */
3866 if (extra && args_addr == 0
3867 && where_pad != none && where_pad != stack_direction)
3868 anti_adjust_stack (GEN_INT (extra));
3870 /* If we make space by pushing it, we might as well push
3871 the real data. Otherwise, we can leave OFFSET nonzero
3872 and leave the space uninitialized. */
3876 /* Now NOT_STACK gets the number of words that we don't need to
3877 allocate on the stack. Convert OFFSET to words too. */
3878 not_stack = (partial - offset) / UNITS_PER_WORD;
3879 offset /= UNITS_PER_WORD;
3881 /* If the partial register-part of the arg counts in its stack size,
3882 skip the part of stack space corresponding to the registers.
3883 Otherwise, start copying to the beginning of the stack space,
3884 by setting SKIP to 0. */
3885 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3887 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3888 x = validize_mem (force_const_mem (mode, x));
3890 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3891 SUBREGs of such registers are not allowed. */
3892 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3893 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3894 x = copy_to_reg (x);
3896 /* Loop over all the words allocated on the stack for this arg. */
3897 /* We can do it by words, because any scalar bigger than a word
3898 has a size a multiple of a word. */
3899 #ifndef PUSH_ARGS_REVERSED
3900 for (i = not_stack; i < size; i++)
#else
for (i = size - 1; i >= not_stack; i--)
#endif
3904 if (i >= not_stack + offset)
3905 emit_push_insn (operand_subword_force (x, i, mode),
3906 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3908 GEN_INT (args_offset + ((i - not_stack + skip)
3910 reg_parm_stack_space, alignment_pad);
3917 /* Push padding now if padding above and stack grows down,
3918 or if padding below and stack grows up.
3919 But if space already allocated, this has already been done. */
3920 if (extra && args_addr == 0
3921 && where_pad != none && where_pad != stack_direction)
3922 anti_adjust_stack (GEN_INT (extra));
3924 #ifdef PUSH_ROUNDING
3925 if (args_addr == 0 && PUSH_ARGS)
3926 emit_single_push_insn (mode, x, type);
3930 if (GET_CODE (args_so_far) == CONST_INT)
3932 = memory_address (mode,
3933 plus_constant (args_addr,
3934 INTVAL (args_so_far)));
3936 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3938 dest = gen_rtx_MEM (mode, addr);
3940 /* We do *not* set_mem_attributes here, because incoming arguments
3941 may overlap with sibling call outgoing arguments and we cannot
3942 allow reordering of reads from function arguments with stores
3943 to outgoing arguments of sibling calls. We do, however, want
3944 to record the alignment of the stack slot. */
3945 /* ALIGN may well be better aligned than TYPE, e.g. due to
3946 PARM_BOUNDARY. Assume the caller isn't lying. */
3947 set_mem_align (dest, align);
3949 emit_move_insn (dest, x);
3953 /* If part should go in registers, copy that part
3954 into the appropriate registers. Do this now, at the end,
3955 since mem-to-mem copies above may do function calls. */
3956 if (partial > 0 && reg != 0)
3958 /* Handle calls that pass values in multiple non-contiguous locations.
3959 The Irix 6 ABI has examples of this. */
3960 if (GET_CODE (reg) == PARALLEL)
3961 emit_group_load (reg, x, type, -1);
3964 gcc_assert (partial % UNITS_PER_WORD == 0);
3965 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3969 if (extra && args_addr == 0 && where_pad == stack_direction)
3970 anti_adjust_stack (GEN_INT (extra));
3972 if (alignment_pad && args_addr == 0)
3973 anti_adjust_stack (alignment_pad);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
operations. */
3980 get_subtarget (rtx x)
3984 /* Only registers can be subtargets. */
3986 /* Don't use hard regs to avoid extending their life. */
3987 || REGNO (x) < FIRST_PSEUDO_REGISTER
3991 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3992 FIELD is a bitfield. Returns true if the optimization was successful,
3993 and there's nothing else to do. */
3996 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3997 unsigned HOST_WIDE_INT bitpos,
3998 enum machine_mode mode1, rtx str_rtx,
4001 enum machine_mode str_mode = GET_MODE (str_rtx);
4002 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4007 if (mode1 != VOIDmode
4008 || bitsize >= BITS_PER_WORD
4009 || str_bitsize > BITS_PER_WORD
4010 || TREE_SIDE_EFFECTS (to)
4011 || TREE_THIS_VOLATILE (to))
4015 if (!BINARY_CLASS_P (src)
4016 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4019 op0 = TREE_OPERAND (src, 0);
4020 op1 = TREE_OPERAND (src, 1);
4023 if (!operand_equal_p (to, op0, 0))
4026 if (MEM_P (str_rtx))
4028 unsigned HOST_WIDE_INT offset1;
4030 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4031 str_mode = word_mode;
4032 str_mode = get_best_mode (bitsize, bitpos,
4033 MEM_ALIGN (str_rtx), str_mode, 0);
4034 if (str_mode == VOIDmode)
4036 str_bitsize = GET_MODE_BITSIZE (str_mode);
4039 bitpos %= str_bitsize;
4040 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4041 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4043 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4046 /* If the bit field covers the whole REG/MEM, store_field
4047 will likely generate better code. */
4048 if (bitsize >= str_bitsize)
4051 /* We can't handle fields split across multiple entities. */
4052 if (bitpos + bitsize > str_bitsize)
4055 if (BYTES_BIG_ENDIAN)
4056 bitpos = str_bitsize - bitpos - bitsize;
4058 switch (TREE_CODE (src))
4062 /* For now, just optimize the case of the topmost bitfield
4063 where we don't need to do any masking and also
4064 1 bit bitfields where xor can be used.
4065 We might win by one instruction for the other bitfields
4066 too if insv/extv instructions aren't used, so that
4067 can be added later. */
4068 if (bitpos + bitsize != str_bitsize
4069 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4072 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4073 value = convert_modes (str_mode,
4074 TYPE_MODE (TREE_TYPE (op1)), value,
4075 TYPE_UNSIGNED (TREE_TYPE (op1)));
4077 /* We may be accessing data outside the field, which means
4078 we can alias adjacent data. */
4079 if (MEM_P (str_rtx))
4081 str_rtx = shallow_copy_rtx (str_rtx);
4082 set_mem_alias_set (str_rtx, 0);
4083 set_mem_expr (str_rtx, 0);
4086 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4087 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4089 value = expand_and (str_mode, value, const1_rtx, NULL);
4092 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4093 build_int_cst (NULL_TREE, bitpos),
4095 result = expand_binop (str_mode, binop, str_rtx,
4096 value, str_rtx, 1, OPTAB_WIDEN);
4097 if (result != str_rtx)
4098 emit_move_insn (str_rtx, result);
4103 if (TREE_CODE (op1) != INTEGER_CST)
4105 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4106 value = convert_modes (GET_MODE (str_rtx),
4107 TYPE_MODE (TREE_TYPE (op1)), value,
4108 TYPE_UNSIGNED (TREE_TYPE (op1)));
4110 /* We may be accessing data outside the field, which means
4111 we can alias adjacent data. */
4112 if (MEM_P (str_rtx))
4114 str_rtx = shallow_copy_rtx (str_rtx);
4115 set_mem_alias_set (str_rtx, 0);
4116 set_mem_expr (str_rtx, 0);
4119 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4120 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4122 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4124 value = expand_and (GET_MODE (str_rtx), value, mask,
4127 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4128 build_int_cst (NULL_TREE, bitpos),
4130 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4131 value, str_rtx, 1, OPTAB_WIDEN);
4132 if (result != str_rtx)
4133 emit_move_insn (str_rtx, result);
4144 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4145 is true, try generating a nontemporal store. */
4148 expand_assignment (tree to, tree from, bool nontemporal)
4153 /* Don't crash if the lhs of the assignment was erroneous. */
4154 if (TREE_CODE (to) == ERROR_MARK)
4156 result = expand_normal (from);
4160 /* Optimize away no-op moves without side-effects. */
4161 if (operand_equal_p (to, from, 0))
4164 /* Assignment of a structure component needs special treatment
4165 if the structure component's rtx is not simply a MEM.
4166 Assignment of an array element at a constant index, and assignment of
4167 an array element in an unaligned packed structure field, have the same problem. */
4169 if (handled_component_p (to)
4170 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4172 enum machine_mode mode1;
4173 HOST_WIDE_INT bitsize, bitpos;
4180 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4181 &unsignedp, &volatilep, true);
4183 /* If we are going to use store_bit_field and extract_bit_field,
4184 make sure to_rtx will be safe for multiple use. */
4186 to_rtx = expand_normal (tem);
4192 if (!MEM_P (to_rtx))
4194 /* We can get constant negative offsets into arrays with broken
4195 user code. Translate this to a trap instead of ICEing. */
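/* E.g. a folded access like "a[-5]" applied to an object that does not
live in memory can only come from invalid code, so emitting a trap
plus a dummy BLKmode MEM keeps expansion going instead of crashing
the compiler. */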
4196 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4197 expand_builtin_trap ();
4198 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4201 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4202 #ifdef POINTERS_EXTEND_UNSIGNED
4203 if (GET_MODE (offset_rtx) != Pmode)
4204 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4206 if (GET_MODE (offset_rtx) != ptr_mode)
4207 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4210 /* A constant address in TO_RTX can have VOIDmode; we must not try
4211 to call force_reg for that case, so avoid it. */
4213 && GET_MODE (to_rtx) == BLKmode
4214 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4216 && (bitpos % bitsize) == 0
4217 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4218 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4220 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4224 to_rtx = offset_address (to_rtx, offset_rtx,
4225 highest_pow2_factor_for_target (to,
4229 /* Handle expand_expr of a complex value returning a CONCAT. */
4230 if (GET_CODE (to_rtx) == CONCAT)
4232 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4234 gcc_assert (bitpos == 0);
4235 result = store_expr (from, to_rtx, false, nontemporal);
4239 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4240 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4248 /* If the field is at offset zero, we could have been given the
4249 DECL_RTX of the parent struct. Don't munge it. */
4250 to_rtx = shallow_copy_rtx (to_rtx);
4252 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4254 /* Deal with volatile and readonly fields. The former is only
4255 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4257 MEM_VOLATILE_P (to_rtx) = 1;
4258 if (component_uses_parent_alias_set (to))
4259 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4262 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4266 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4267 TREE_TYPE (tem), get_alias_set (to),
4272 preserve_temp_slots (result);
4278 /* If the rhs is a function call and its value is not an aggregate,
4279 call the function before we start to compute the lhs.
4280 This is needed for correct code for cases such as
4281 val = setjmp (buf) on machines where reference to val
4282 requires loading up part of an address in a separate insn.
4284 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4285 since it might be a promoted variable where the zero- or sign-extension
4286 needs to be done. Handling this in the normal way is safe because no
4287 computation is done before the call. */
4288 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4289 && COMPLETE_TYPE_P (TREE_TYPE (from))
4290 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4291 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4292 && REG_P (DECL_RTL (to))))
4297 value = expand_normal (from);
4299 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4301 /* Handle calls that return values in multiple non-contiguous locations.
4302 The Irix 6 ABI has examples of this. */
4303 if (GET_CODE (to_rtx) == PARALLEL)
4304 emit_group_load (to_rtx, value, TREE_TYPE (from),
4305 int_size_in_bytes (TREE_TYPE (from)));
4306 else if (GET_MODE (to_rtx) == BLKmode)
4307 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4310 if (POINTER_TYPE_P (TREE_TYPE (to)))
4311 value = convert_memory_address (GET_MODE (to_rtx), value);
4312 emit_move_insn (to_rtx, value);
4314 preserve_temp_slots (to_rtx);
4320 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4321 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4324 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4326 /* Don't move directly into a return register. */
4327 if (TREE_CODE (to) == RESULT_DECL
4328 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4333 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4335 if (GET_CODE (to_rtx) == PARALLEL)
4336 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4337 int_size_in_bytes (TREE_TYPE (from)));
4339 emit_move_insn (to_rtx, temp);
4341 preserve_temp_slots (to_rtx);
4347 /* In case we are returning the contents of an object which overlaps
4348 the place the value is being stored, use a safe function when copying
4349 a value through a pointer into a structure value return block. */
4350 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4351 && cfun->returns_struct
4352 && !cfun->returns_pcc_struct)
4357 size = expr_size (from);
4358 from_rtx = expand_normal (from);
4360 emit_library_call (memmove_libfunc, LCT_NORMAL,
4361 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4362 XEXP (from_rtx, 0), Pmode,
4363 convert_to_mode (TYPE_MODE (sizetype),
4364 size, TYPE_UNSIGNED (sizetype)),
4365 TYPE_MODE (sizetype));
4367 preserve_temp_slots (to_rtx);
4373 /* Compute FROM and store the value in the rtx we got. */
4376 result = store_expr (from, to_rtx, 0, nontemporal);
4377 preserve_temp_slots (result);
4383 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
4384 succeeded, false otherwise. */
4387 emit_storent_insn (rtx to, rtx from)
4389 enum machine_mode mode = GET_MODE (to), imode;
4390 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4393 if (code == CODE_FOR_nothing)
4396 imode = insn_data[code].operand[0].mode;
4397 if (!insn_data[code].operand[0].predicate (to, imode))
4400 imode = insn_data[code].operand[1].mode;
4401 if (!insn_data[code].operand[1].predicate (from, imode))
4403 from = copy_to_mode_reg (imode, from);
4404 if (!insn_data[code].operand[1].predicate (from, imode))
4408 pattern = GEN_FCN (code) (to, from);
4409 if (pattern == NULL_RTX)
4412 emit_insn (pattern);
4416 /* Generate code for computing expression EXP,
4417 and storing the value into TARGET.
4419 If the mode is BLKmode then we may return TARGET itself.
4420 It turns out that in BLKmode it doesn't cause a problem,
4421 because C has no operators that could combine two different
4422 assignments into the same BLKmode object with different values
4423 with no sequence point. Will other languages need this to be more rigorous?
4426 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4427 stack, and block moves may need to be treated specially.
4429 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4432 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4435 rtx alt_rtl = NULL_RTX;
4436 int dont_return_target = 0;
4438 if (VOID_TYPE_P (TREE_TYPE (exp)))
4440 /* C++ can generate ?: expressions with a throw expression in one
4441 branch and an rvalue in the other. Here, we resolve attempts to
4442 store the throw expression's nonexistent result. */
4443 gcc_assert (!call_param_p);
4444 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4447 if (TREE_CODE (exp) == COMPOUND_EXPR)
4449 /* Perform first part of compound expression, then assign from second part. */
4451 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4452 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4453 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4456 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4458 /* For conditional expression, get safe form of the target. Then
4459 test the condition, doing the appropriate assignment on either
4460 side. This avoids the creation of unnecessary temporaries.
4461 For non-BLKmode, it is more efficient not to do this. */
4463 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4465 do_pending_stack_adjust ();
4467 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4468 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4470 emit_jump_insn (gen_jump (lab2));
4473 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4480 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4481 /* If this is a scalar in a register that is stored in a wider mode
4482 than the declared mode, compute the result into its declared mode
4483 and then convert to the wider mode. Our value is the computed expression. */
4486 rtx inner_target = 0;
4488 /* We can do the conversion inside EXP, which will often result
4489 in some optimizations. Do the conversion in two steps: first
4490 change the signedness, if needed, then the extend. But don't
4491 do this if the type of EXP is a subtype of something else
4492 since then the conversion might involve more than just
4493 converting modes. */
4494 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4495 && TREE_TYPE (TREE_TYPE (exp)) == 0
4496 && GET_MODE_PRECISION (GET_MODE (target))
4497 == TYPE_PRECISION (TREE_TYPE (exp)))
4499 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4500 != SUBREG_PROMOTED_UNSIGNED_P (target))
4502 /* Some types, e.g. Fortran's logical*4, won't have a signed
4503 version, so use the mode instead. */
4505 = (signed_or_unsigned_type_for
4506 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4508 ntype = lang_hooks.types.type_for_mode
4509 (TYPE_MODE (TREE_TYPE (exp)),
4510 SUBREG_PROMOTED_UNSIGNED_P (target));
4512 exp = fold_convert (ntype, exp);
4515 exp = fold_convert (lang_hooks.types.type_for_mode
4516 (GET_MODE (SUBREG_REG (target)),
4517 SUBREG_PROMOTED_UNSIGNED_P (target)),
4520 inner_target = SUBREG_REG (target);
4523 temp = expand_expr (exp, inner_target, VOIDmode,
4524 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4526 /* If TEMP is a VOIDmode constant, use convert_modes to make
4527 sure that we properly convert it. */
4528 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4530 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4531 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4532 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4533 GET_MODE (target), temp,
4534 SUBREG_PROMOTED_UNSIGNED_P (target));
4537 convert_move (SUBREG_REG (target), temp,
4538 SUBREG_PROMOTED_UNSIGNED_P (target));
4542 else if (TREE_CODE (exp) == STRING_CST
4543 && !nontemporal && !call_param_p
4544 && TREE_STRING_LENGTH (exp) > 0
4545 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4547 /* Optimize initialization of an array with a STRING_CST. */
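/* E.g. for "char buf[64] = "abc";" the bytes of the STRING_CST
(including the terminating NUL, rounded up to a store_by_pieces
friendly length) are stored by pieces and the remaining tail of the
array is cleared. */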
4548 HOST_WIDE_INT exp_len, str_copy_len;
4551 exp_len = int_expr_size (exp);
4555 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4556 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4559 str_copy_len = TREE_STRING_LENGTH (exp);
4560 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4562 str_copy_len += STORE_MAX_PIECES - 1;
4563 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4565 str_copy_len = MIN (str_copy_len, exp_len);
4566 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4567 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4568 MEM_ALIGN (target), false))
4573 dest_mem = store_by_pieces (dest_mem,
4574 str_copy_len, builtin_strncpy_read_str,
4575 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4576 MEM_ALIGN (target), false,
4577 exp_len > str_copy_len ? 1 : 0);
4578 if (exp_len > str_copy_len)
4579 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4580 GEN_INT (exp_len - str_copy_len),
4589 /* If we want to use a nontemporal store, force the value to a register first. */
4591 tmp_target = nontemporal ? NULL_RTX : target;
4592 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4594 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4596 /* Return TARGET if it's a specified hardware register.
4597 If TARGET is a volatile mem ref, either return TARGET
4598 or return a reg copied *from* TARGET; ANSI requires this.
4600 Otherwise, if TEMP is not TARGET, return TEMP
4601 if it is constant (for efficiency),
4602 or if we really want the correct value. */
4603 if (!(target && REG_P (target)
4604 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4605 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4606 && ! rtx_equal_p (temp, target)
4607 && CONSTANT_P (temp))
4608 dont_return_target = 1;
4611 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4612 the same as that of TARGET, adjust the constant. This is needed, for
4613 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4615 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4616 && TREE_CODE (exp) != ERROR_MARK
4617 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4618 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4619 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4621 /* If value was not generated in the target, store it there.
4622 Convert the value to TARGET's type first if necessary and emit the
4623 pending incrementations that have been queued when expanding EXP.
4624 Note that we cannot emit the whole queue blindly because this will
4625 effectively disable the POST_INC optimization later.
4627 If TEMP and TARGET compare equal according to rtx_equal_p, but
4628 one or both of them are volatile memory refs, we have to distinguish
4630 - expand_expr has used TARGET. In this case, we must not generate
4631 another copy. This can be detected by TARGET being equal according to ==.
4633 - expand_expr has not used TARGET - that means that the source just
4634 happens to have the same RTX form. Since temp will have been created
4635 by expand_expr, it will compare unequal according to == .
4636 We must generate a copy in this case, to reach the correct number
4637 of volatile memory references. */
4639 if ((! rtx_equal_p (temp, target)
4640 || (temp != target && (side_effects_p (temp)
4641 || side_effects_p (target))))
4642 && TREE_CODE (exp) != ERROR_MARK
4643 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4644 but TARGET is not a valid memory reference, TEMP will differ
4645 from TARGET although it is really the same location. */
4646 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4647 /* If there's nothing to copy, don't bother. Don't call
4648 expr_size unless necessary, because for some front-ends (C++)
4649 the expr_size hook must not be given objects that are not
4650 supposed to be bit-copied or bit-initialized. */
4651 && expr_size (exp) != const0_rtx)
4653 if (GET_MODE (temp) != GET_MODE (target)
4654 && GET_MODE (temp) != VOIDmode)
4656 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4657 if (dont_return_target)
4659 /* In this case, we will return TEMP,
4660 so make sure it has the proper mode.
4661 But don't forget to store the value into TARGET. */
4662 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4663 emit_move_insn (target, temp);
4665 else if (GET_MODE (target) == BLKmode
4666 || GET_MODE (temp) == BLKmode)
4667 emit_block_move (target, temp, expr_size (exp),
4669 ? BLOCK_OP_CALL_PARM
4670 : BLOCK_OP_NORMAL));
4672 convert_move (target, temp, unsignedp);
4675 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4677 /* Handle copying a string constant into an array. The string
4678 constant may be shorter than the array. So copy just the string's
4679 actual length, and clear the rest. First get the size of the data
4680 type of the string, which is actually the size of the target. */
4681 rtx size = expr_size (exp);
4683 if (GET_CODE (size) == CONST_INT
4684 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4685 emit_block_move (target, temp, size,
4687 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4690 /* Compute the size of the data to copy from the string. */
4692 = size_binop (MIN_EXPR,
4693 make_tree (sizetype, size),
4694 size_int (TREE_STRING_LENGTH (exp)));
4696 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4698 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4701 /* Copy that much. */
4702 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4703 TYPE_UNSIGNED (sizetype));
4704 emit_block_move (target, temp, copy_size_rtx,
4706 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4708 /* Figure out how much is left in TARGET that we have to clear.
4709 Do all calculations in ptr_mode. */
4710 if (GET_CODE (copy_size_rtx) == CONST_INT)
4712 size = plus_constant (size, -INTVAL (copy_size_rtx));
4713 target = adjust_address (target, BLKmode,
4714 INTVAL (copy_size_rtx));
4718 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4719 copy_size_rtx, NULL_RTX, 0,
4722 #ifdef POINTERS_EXTEND_UNSIGNED
4723 if (GET_MODE (copy_size_rtx) != Pmode)
4724 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4725 TYPE_UNSIGNED (sizetype));
4728 target = offset_address (target, copy_size_rtx,
4729 highest_pow2_factor (copy_size));
4730 label = gen_label_rtx ();
4731 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4732 GET_MODE (size), 0, label);
4735 if (size != const0_rtx)
4736 clear_storage (target, size, BLOCK_OP_NORMAL);
4742 /* Handle calls that return values in multiple non-contiguous locations.
4743 The Irix 6 ABI has examples of this. */
4744 else if (GET_CODE (target) == PARALLEL)
4745 emit_group_load (target, temp, TREE_TYPE (exp),
4746 int_size_in_bytes (TREE_TYPE (exp)));
4747 else if (GET_MODE (temp) == BLKmode)
4748 emit_block_move (target, temp, expr_size (exp),
4750 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4751 else if (nontemporal
4752 && emit_storent_insn (target, temp))
4753 /* If we managed to emit a nontemporal store, there is nothing else to do. */
4758 temp = force_operand (temp, target);
4760 emit_move_insn (target, temp);
4767 /* Helper for categorize_ctor_elements. Identical interface. */
4770 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4771 HOST_WIDE_INT *p_elt_count,
4774 unsigned HOST_WIDE_INT idx;
4775 HOST_WIDE_INT nz_elts, elt_count;
4776 tree value, purpose;
4778 /* Whether CTOR is a valid constant initializer, in accordance with what
4779 initializer_constant_valid_p does. If inferred from the constructor
4780 elements, true until proven otherwise. */
4781 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4782 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4787 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4792 if (TREE_CODE (purpose) == RANGE_EXPR)
4794 tree lo_index = TREE_OPERAND (purpose, 0);
4795 tree hi_index = TREE_OPERAND (purpose, 1);
4797 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4798 mult = (tree_low_cst (hi_index, 1)
4799 - tree_low_cst (lo_index, 1) + 1);
4802 switch (TREE_CODE (value))
4806 HOST_WIDE_INT nz = 0, ic = 0;
4809 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4811 nz_elts += mult * nz;
4812 elt_count += mult * ic;
4814 if (const_from_elts_p && const_p)
4815 const_p = const_elt_p;
4822 if (!initializer_zerop (value))
4828 nz_elts += mult * TREE_STRING_LENGTH (value);
4829 elt_count += mult * TREE_STRING_LENGTH (value);
4833 if (!initializer_zerop (TREE_REALPART (value)))
4835 if (!initializer_zerop (TREE_IMAGPART (value)))
4843 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4845 if (!initializer_zerop (TREE_VALUE (v)))
4856 if (const_from_elts_p && const_p)
4857 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4864 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4865 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4868 bool clear_this = true;
4870 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4872 /* We don't expect more than one element of the union to be
4873 initialized. Not sure what we should do otherwise... */
4874 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4877 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4878 CONSTRUCTOR_ELTS (ctor),
4881 /* ??? We could look at each element of the union, and find the
4882 largest element, which would avoid comparing the size of the
4883 initialized element against any tail padding in the union.
4884 Doesn't seem worth the effort... */
4885 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4886 TYPE_SIZE (init_sub_type)) == 1)
4888 /* And now we have to find out if the element itself is fully
4889 constructed. E.g. for union { struct { int a, b; } s; } u
4890 = { .s = { .a = 1 } }. */
4891 if (elt_count == count_type_elements (init_sub_type, false))
4896 *p_must_clear = clear_this;
4899 *p_nz_elts += nz_elts;
4900 *p_elt_count += elt_count;
4905 /* Examine CTOR to discover:
4906 * how many scalar fields are set to nonzero values,
4907 and place it in *P_NZ_ELTS;
4908 * how many scalar fields in total are in CTOR,
4909 and place it in *P_ELT_COUNT;
4910 * if a type is a union, and the initializer from the constructor
4911 is not the largest element in the union, then set *p_must_clear.
4913 Return whether or not CTOR is a valid static constant initializer, the same
4914 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4917 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4918 HOST_WIDE_INT *p_elt_count,
4923 *p_must_clear = false;
4926 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4929 /* Count the number of scalars in TYPE. Return -1 if the count
4930 overflows or TYPE is variable-sized. If ALLOW_FLEXARR is true,
4931 don't count a flexible array member at the end of the structure. */
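/* E.g. "int a[4][2]" counts as 8 scalars, and with ALLOW_FLEXARR true
"struct { int n; char tail[]; }" counts as 1, the trailing flexible
array member being ignored. */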
4934 count_type_elements (const_tree type, bool allow_flexarr)
4936 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4937 switch (TREE_CODE (type))
4941 tree telts = array_type_nelts (type);
4942 if (telts && host_integerp (telts, 1))
4944 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4945 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4948 else if (max / n > m)
4956 HOST_WIDE_INT n = 0, t;
4959 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4960 if (TREE_CODE (f) == FIELD_DECL)
4962 t = count_type_elements (TREE_TYPE (f), false);
4965 /* Check for structures with a flexible array member. */
4966 tree tf = TREE_TYPE (f);
4968 && TREE_CHAIN (f) == NULL
4969 && TREE_CODE (tf) == ARRAY_TYPE
4971 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4972 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4973 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4974 && int_size_in_bytes (type) >= 0)
4986 case QUAL_UNION_TYPE:
4993 return TYPE_VECTOR_SUBPARTS (type);
4997 case FIXED_POINT_TYPE:
5002 case REFERENCE_TYPE:
5017 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
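/* E.g. "int a[8] = { 1 };" qualifies (one nonzero element out of
eight), while "int a[4] = { 0, 0, 0, 1 };" does not, since the
nonzero count must be strictly less than a quarter of the elements. */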
5020 mostly_zeros_p (const_tree exp)
5022 if (TREE_CODE (exp) == CONSTRUCTOR)
5025 HOST_WIDE_INT nz_elts, count, elts;
5028 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5032 elts = count_type_elements (TREE_TYPE (exp), false);
5034 return nz_elts < elts / 4;
5037 return initializer_zerop (exp);
5040 /* Return 1 if EXP contains all zeros. */
5043 all_zeros_p (const_tree exp)
5045 if (TREE_CODE (exp) == CONSTRUCTOR)
5048 HOST_WIDE_INT nz_elts, count;
5051 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5052 return nz_elts == 0;
5055 return initializer_zerop (exp);
5058 /* Helper function for store_constructor.
5059 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5060 TYPE is the type of the CONSTRUCTOR, not the element type.
5061 CLEARED is as for store_constructor.
5062 ALIAS_SET is the alias set to use for any stores.
5064 This provides a recursive shortcut back to store_constructor when it isn't
5065 necessary to go through store_field. This is so that we can pass through
5066 the cleared field to let store_constructor know that we may not have to
5067 clear a substructure if the outer structure has already been cleared. */
5070 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5071 HOST_WIDE_INT bitpos, enum machine_mode mode,
5072 tree exp, tree type, int cleared,
5073 alias_set_type alias_set)
5075 if (TREE_CODE (exp) == CONSTRUCTOR
5076 /* We can only call store_constructor recursively if the size and
5077 bit position are on a byte boundary. */
5078 && bitpos % BITS_PER_UNIT == 0
5079 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5080 /* If we have a nonzero bitpos for a register target, then we just
5081 let store_field do the bitfield handling. This is unlikely to
5082 generate unnecessary clear instructions anyway. */
5083 && (bitpos == 0 || MEM_P (target)))
5087 = adjust_address (target,
5088 GET_MODE (target) == BLKmode
5090 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5091 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5094 /* Update the alias set, if required. */
5095 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5096 && MEM_ALIAS_SET (target) != 0)
5098 target = copy_rtx (target);
5099 set_mem_alias_set (target, alias_set);
5102 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5105 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5108 /* Store the value of constructor EXP into the rtx TARGET.
5109 TARGET is either a REG or a MEM; we know it cannot conflict, since
5110 safe_from_p has been called.
5111 CLEARED is true if TARGET is known to have been zeroed.
5112 SIZE is the number of bytes of TARGET we are allowed to modify: this
5113 may not be the same as the size of EXP if we are assigning to a field
5114 which has been packed to exclude padding bits. */
5117 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5119 tree type = TREE_TYPE (exp);
5120 #ifdef WORD_REGISTER_OPERATIONS
5121 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5124 switch (TREE_CODE (type))
5128 case QUAL_UNION_TYPE:
5130 unsigned HOST_WIDE_INT idx;
5133 /* If size is zero or the target is already cleared, do nothing. */
5134 if (size == 0 || cleared)
5136 /* We either clear the aggregate or indicate the value is dead. */
5137 else if ((TREE_CODE (type) == UNION_TYPE
5138 || TREE_CODE (type) == QUAL_UNION_TYPE)
5139 && ! CONSTRUCTOR_ELTS (exp))
5140 /* If the constructor is empty, clear the union. */
5142 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5146 /* If we are building a static constructor into a register,
5147 set the initial value as zero so we can fold the value into
5148 a constant. But if more than one register is involved,
5149 this probably loses. */
5150 else if (REG_P (target) && TREE_STATIC (exp)
5151 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5153 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5157 /* If the constructor has fewer fields than the structure or
5158 if we are initializing the structure to mostly zeros, clear
5159 the whole structure first. Don't do this if TARGET is a
5160 register whose mode size isn't equal to SIZE since
5161 clear_storage can't handle this case. */
5163 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5164 != fields_length (type))
5165 || mostly_zeros_p (exp))
5167 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5170 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5174 if (REG_P (target) && !cleared)
5175 emit_clobber (target);
5177 /* Store each element of the constructor into the
5178 corresponding field of TARGET. */
5179 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5181 enum machine_mode mode;
5182 HOST_WIDE_INT bitsize;
5183 HOST_WIDE_INT bitpos = 0;
5185 rtx to_rtx = target;
5187 /* Just ignore missing fields. We cleared the whole
5188 structure, above, if any fields are missing. */
5192 if (cleared && initializer_zerop (value))
5195 if (host_integerp (DECL_SIZE (field), 1))
5196 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5200 mode = DECL_MODE (field);
5201 if (DECL_BIT_FIELD (field))
5204 offset = DECL_FIELD_OFFSET (field);
5205 if (host_integerp (offset, 0)
5206 && host_integerp (bit_position (field), 0))
5208 bitpos = int_bit_position (field);
5212 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5219 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5220 make_tree (TREE_TYPE (exp),
5223 offset_rtx = expand_normal (offset);
5224 gcc_assert (MEM_P (to_rtx));
5226 #ifdef POINTERS_EXTEND_UNSIGNED
5227 if (GET_MODE (offset_rtx) != Pmode)
5228 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5230 if (GET_MODE (offset_rtx) != ptr_mode)
5231 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5234 to_rtx = offset_address (to_rtx, offset_rtx,
5235 highest_pow2_factor (offset));
5238 #ifdef WORD_REGISTER_OPERATIONS
5239 /* If this initializes a field that is smaller than a
5240 word, at the start of a word, try to widen it to a full
5241 word. This special case allows us to output C++ member
5242 function initializations in a form that the optimizers can understand. */
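/* E.g. a constructor that stores (short) 3 into the first field of a
word-aligned structure may instead store the full word 3, shifted up
on big-endian targets, which the RTL optimizers track more easily
than a narrow bitfield store. */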
5245 && bitsize < BITS_PER_WORD
5246 && bitpos % BITS_PER_WORD == 0
5247 && GET_MODE_CLASS (mode) == MODE_INT
5248 && TREE_CODE (value) == INTEGER_CST
5250 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5252 tree type = TREE_TYPE (value);
5254 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5256 type = lang_hooks.types.type_for_size
5257 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5258 value = fold_convert (type, value);
5261 if (BYTES_BIG_ENDIAN)
5263 = fold_build2 (LSHIFT_EXPR, type, value,
5264 build_int_cst (type,
5265 BITS_PER_WORD - bitsize));
5266 bitsize = BITS_PER_WORD;
5271 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5272 && DECL_NONADDRESSABLE_P (field))
5274 to_rtx = copy_rtx (to_rtx);
5275 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5278 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5279 value, type, cleared,
5280 get_alias_set (TREE_TYPE (field)));
5287 unsigned HOST_WIDE_INT i;
5290 tree elttype = TREE_TYPE (type);
5292 HOST_WIDE_INT minelt = 0;
5293 HOST_WIDE_INT maxelt = 0;
5295 domain = TYPE_DOMAIN (type);
5296 const_bounds_p = (TYPE_MIN_VALUE (domain)
5297 && TYPE_MAX_VALUE (domain)
5298 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5299 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5301 /* If we have constant bounds for the range of the type, get them. */
5304 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5305 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5308 /* If the constructor has fewer elements than the array, clear
5309 the whole array first. Similarly if this is a static
5310 constructor of a non-BLKmode object. */
5313 else if (REG_P (target) && TREE_STATIC (exp))
5317 unsigned HOST_WIDE_INT idx;
5319 HOST_WIDE_INT count = 0, zero_count = 0;
5320 need_to_clear = ! const_bounds_p;
5322 /* This loop is a more accurate version of the loop in
5323 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5324 is also needed to check for missing elements. */
5325 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5327 HOST_WIDE_INT this_node_count;
5332 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5334 tree lo_index = TREE_OPERAND (index, 0);
5335 tree hi_index = TREE_OPERAND (index, 1);
5337 if (! host_integerp (lo_index, 1)
5338 || ! host_integerp (hi_index, 1))
5344 this_node_count = (tree_low_cst (hi_index, 1)
5345 - tree_low_cst (lo_index, 1) + 1);
5348 this_node_count = 1;
5350 count += this_node_count;
5351 if (mostly_zeros_p (value))
5352 zero_count += this_node_count;
5355 /* Clear the entire array first if there are any missing
5356 elements, or if the incidence of zero elements is >= 75%. */
5359 && (count < maxelt - minelt + 1
5360 || 4 * zero_count >= 3 * count))
5364 if (need_to_clear && size > 0)
5367 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5369 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5373 if (!cleared && REG_P (target))
5374 /* Inform later passes that the old value is dead. */
5375 emit_clobber (target);
5377 /* Store each element of the constructor into the
5378 corresponding element of TARGET, determined by counting the elements. */
5380 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5382 enum machine_mode mode;
5383 HOST_WIDE_INT bitsize;
5384 HOST_WIDE_INT bitpos;
5386 rtx xtarget = target;
5388 if (cleared && initializer_zerop (value))
5391 unsignedp = TYPE_UNSIGNED (elttype);
5392 mode = TYPE_MODE (elttype);
5393 if (mode == BLKmode)
5394 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5395 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5398 bitsize = GET_MODE_BITSIZE (mode);
5400 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5402 tree lo_index = TREE_OPERAND (index, 0);
5403 tree hi_index = TREE_OPERAND (index, 1);
5404 rtx index_r, pos_rtx;
5405 HOST_WIDE_INT lo, hi, count;
5408 /* If the range is constant and "small", unroll the loop. */
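/* E.g. the GNU C range initializer "int a[16] = { [2 ... 5] = 7 };"
produces a RANGE_EXPR index; for a small constant count like this we
emit the four stores inline rather than a runtime loop. */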
5410 && host_integerp (lo_index, 0)
5411 && host_integerp (hi_index, 0)
5412 && (lo = tree_low_cst (lo_index, 0),
5413 hi = tree_low_cst (hi_index, 0),
5414 count = hi - lo + 1,
5417 || (host_integerp (TYPE_SIZE (elttype), 1)
5418 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5421 lo -= minelt; hi -= minelt;
5422 for (; lo <= hi; lo++)
5424 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5427 && !MEM_KEEP_ALIAS_SET_P (target)
5428 && TREE_CODE (type) == ARRAY_TYPE
5429 && TYPE_NONALIASED_COMPONENT (type))
5431 target = copy_rtx (target);
5432 MEM_KEEP_ALIAS_SET_P (target) = 1;
5435 store_constructor_field
5436 (target, bitsize, bitpos, mode, value, type, cleared,
5437 get_alias_set (elttype));
5442 rtx loop_start = gen_label_rtx ();
5443 rtx loop_end = gen_label_rtx ();
5446 expand_normal (hi_index);
5447 unsignedp = TYPE_UNSIGNED (domain);
5449 index = build_decl (VAR_DECL, NULL_TREE, domain);
5452 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5454 SET_DECL_RTL (index, index_r);
5455 store_expr (lo_index, index_r, 0, false);
5457 /* Build the head of the loop. */
5458 do_pending_stack_adjust ();
5459 emit_label (loop_start);
5461 /* Assign value to element index. */
5463 fold_convert (ssizetype,
5464 fold_build2 (MINUS_EXPR,
5467 TYPE_MIN_VALUE (domain)));
5470 size_binop (MULT_EXPR, position,
5471 fold_convert (ssizetype,
5472 TYPE_SIZE_UNIT (elttype)));
5474 pos_rtx = expand_normal (position);
5475 xtarget = offset_address (target, pos_rtx,
5476 highest_pow2_factor (position));
5477 xtarget = adjust_address (xtarget, mode, 0);
5478 if (TREE_CODE (value) == CONSTRUCTOR)
5479 store_constructor (value, xtarget, cleared,
5480 bitsize / BITS_PER_UNIT);
5482 store_expr (value, xtarget, 0, false);
5484 /* Generate a conditional jump to exit the loop. */
5485 exit_cond = build2 (LT_EXPR, integer_type_node,
5487 jumpif (exit_cond, loop_end);
5489 /* Update the loop counter, and jump to the head of the loop. */
5491 expand_assignment (index,
5492 build2 (PLUS_EXPR, TREE_TYPE (index),
5493 index, integer_one_node),
5496 emit_jump (loop_start);
5498 /* Build the end of the loop. */
5499 emit_label (loop_end);
5502 else if ((index != 0 && ! host_integerp (index, 0))
5503 || ! host_integerp (TYPE_SIZE (elttype), 1))
5508 index = ssize_int (1);
5511 index = fold_convert (ssizetype,
5512 fold_build2 (MINUS_EXPR,
5515 TYPE_MIN_VALUE (domain)));
5518 size_binop (MULT_EXPR, index,
5519 fold_convert (ssizetype,
5520 TYPE_SIZE_UNIT (elttype)));
5521 xtarget = offset_address (target,
5522 expand_normal (position),
5523 highest_pow2_factor (position));
5524 xtarget = adjust_address (xtarget, mode, 0);
5525 store_expr (value, xtarget, 0, false);
5530 bitpos = ((tree_low_cst (index, 0) - minelt)
5531 * tree_low_cst (TYPE_SIZE (elttype), 1));
5533 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5535 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5536 && TREE_CODE (type) == ARRAY_TYPE
5537 && TYPE_NONALIASED_COMPONENT (type))
5539 target = copy_rtx (target);
5540 MEM_KEEP_ALIAS_SET_P (target) = 1;
5542 store_constructor_field (target, bitsize, bitpos, mode, value,
5543 type, cleared, get_alias_set (elttype));
5551 unsigned HOST_WIDE_INT idx;
5552 constructor_elt *ce;
5556 tree elttype = TREE_TYPE (type);
5557 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5558 enum machine_mode eltmode = TYPE_MODE (elttype);
5559 HOST_WIDE_INT bitsize;
5560 HOST_WIDE_INT bitpos;
5561 rtvec vector = NULL;
5563 alias_set_type alias;
5565 gcc_assert (eltmode != BLKmode);
5567 n_elts = TYPE_VECTOR_SUBPARTS (type);
5568 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5570 enum machine_mode mode = GET_MODE (target);
5572 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5573 if (icode != CODE_FOR_nothing)
5577 vector = rtvec_alloc (n_elts);
5578 for (i = 0; i < n_elts; i++)
5579 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5583 /* If the constructor has fewer elements than the vector,
5584 clear the whole vector first. Similarly if this is a static
5585 constructor of a non-BLKmode object. */
5588 else if (REG_P (target) && TREE_STATIC (exp))
5592 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5595 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5597 int n_elts_here = tree_low_cst
5598 (int_const_binop (TRUNC_DIV_EXPR,
5599 TYPE_SIZE (TREE_TYPE (value)),
5600 TYPE_SIZE (elttype), 0), 1);
5602 count += n_elts_here;
5603 if (mostly_zeros_p (value))
5604 zero_count += n_elts_here;
5607 /* Clear the entire vector first if there are any missing elements,
5608 or if the incidence of zero elements is >= 75%. */
5609 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5612 if (need_to_clear && size > 0 && !vector)
5615 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5617 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5621 /* Inform later passes that the old value is dead. */
5622 if (!cleared && !vector && REG_P (target))
5623 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5626 alias = MEM_ALIAS_SET (target);
5628 alias = get_alias_set (elttype);
5630 /* Store each element of the constructor into the corresponding
5631 element of TARGET, determined by counting the elements. */
5632 for (idx = 0, i = 0;
5633 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5634 idx++, i += bitsize / elt_size)
5636 HOST_WIDE_INT eltpos;
5637 tree value = ce->value;
5639 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5640 if (cleared && initializer_zerop (value))
5644 eltpos = tree_low_cst (ce->index, 1);
5650 /* Vector CONSTRUCTORs should only be built from smaller
5651 vectors in the case of BLKmode vectors. */
5652 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5653 RTVEC_ELT (vector, eltpos)
5654 = expand_normal (value);
5658 enum machine_mode value_mode =
5659 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5660 ? TYPE_MODE (TREE_TYPE (value))
5662 bitpos = eltpos * elt_size;
5663 store_constructor_field (target, bitsize, bitpos,
5664 value_mode, value, type,
5670 emit_insn (GEN_FCN (icode)
5672 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5681 /* Store the value of EXP (an expression tree)
5682 into a subfield of TARGET which has mode MODE and occupies
5683 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5684 If MODE is VOIDmode, it means that we are storing into a bit-field.
5686 Always return const0_rtx unless we have something particular to return.
5689 TYPE is the type of the underlying object.
5691 ALIAS_SET is the alias set for the destination. This value will
5692 (in general) be different from that for TARGET, since TARGET is a
5693 reference to the containing structure.
5695 If NONTEMPORAL is true, try generating a nontemporal store. */
5698 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5699 enum machine_mode mode, tree exp, tree type,
5700 alias_set_type alias_set, bool nontemporal)
5702 HOST_WIDE_INT width_mask = 0;
5704 if (TREE_CODE (exp) == ERROR_MARK)
5707 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5710 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5711 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5712 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5714 /* If we are storing into an unaligned field of an aligned union that is
5715 in a register, we may have the mode of TARGET being an integer mode but
5716 MODE == BLKmode. In that case, get an aligned object whose size and
5717 alignment are the same as TARGET and store TARGET into it (we can avoid
5718 the store if the field being stored is the entire width of TARGET). Then
5719 call ourselves recursively to store the field into a BLKmode version of
5720 that object. Finally, load from the object into TARGET. This is not
5721 very efficient in general, but should only be slightly more expensive
5722 than the otherwise-required unaligned accesses. Perhaps this can be
5723 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5724 twice, once with emit_move_insn and once via store_field. */
5727 && (REG_P (target) || GET_CODE (target) == SUBREG))
5729 rtx object = assign_temp (type, 0, 1, 1);
5730 rtx blk_object = adjust_address (object, BLKmode, 0);
5732 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5733 emit_move_insn (object, target);
5735 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5738 emit_move_insn (target, object);
5740 /* We want to return the BLKmode version of the data. */
5744 if (GET_CODE (target) == CONCAT)
5746 /* We're storing into a struct containing a single __complex. */
5748 gcc_assert (!bitpos);
5749 return store_expr (exp, target, 0, nontemporal);
5752 /* If the structure is in a register or if the component
5753 is a bit field, we cannot use addressing to access it.
5754 Use bit-field techniques or SUBREG to store in it. */
5756 if (mode == VOIDmode
5757 || (mode != BLKmode && ! direct_store[(int) mode]
5758 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5759 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5761 || GET_CODE (target) == SUBREG
5762 /* If the field isn't aligned enough to store as an ordinary memref,
5763 store it as a bit field. */
5765 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5766 || bitpos % GET_MODE_ALIGNMENT (mode))
5767 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5768 || (bitpos % BITS_PER_UNIT != 0)))
5769 /* If the RHS and field are a constant size and the size of the
5770 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5773 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5774 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5778 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5779 implies a mask operation. If the precision is the same size as
5780 the field we're storing into, that mask is redundant. This is
5781 particularly common with bit field assignments generated by the C front end. */
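/* E.g. for "struct { unsigned f : 5; } s;" the C front end expands
"s.f = x" with X converted to the 5-bit type; since exactly 5 bits
are stored anyway, the conversion's implied masking is redundant and
we can expand X itself. */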
5783 if (TREE_CODE (exp) == NOP_EXPR)
5785 tree type = TREE_TYPE (exp);
5786 if (INTEGRAL_TYPE_P (type)
5787 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5788 && bitsize == TYPE_PRECISION (type))
5790 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5791 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5792 exp = TREE_OPERAND (exp, 0);
5796 temp = expand_normal (exp);
5798 /* If BITSIZE is narrower than the size of the type of EXP
5799 we will be narrowing TEMP. Normally, what's wanted are the
5800 low-order bits. However, if EXP's type is a record and this is a
5801 big-endian machine, we want the upper BITSIZE bits. */
5802 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5803 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5804 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5805 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5806 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5810 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5812 if (mode != VOIDmode && mode != BLKmode
5813 && mode != TYPE_MODE (TREE_TYPE (exp)))
5814 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5816 /* If the modes of TEMP and TARGET are both BLKmode, both
5817 must be in memory and BITPOS must be aligned on a byte
5818 boundary. If so, we simply do a block copy. Likewise
5819 for a BLKmode-like TARGET. */
5820 if (GET_MODE (temp) == BLKmode
5821 && (GET_MODE (target) == BLKmode
5823 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5824 && (bitpos % BITS_PER_UNIT) == 0
5825 && (bitsize % BITS_PER_UNIT) == 0)))
5827 gcc_assert (MEM_P (target) && MEM_P (temp)
5828 && (bitpos % BITS_PER_UNIT) == 0);
5830 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5831 emit_block_move (target, temp,
5832 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5839 /* Store the value in the bitfield. */
5840 store_bit_field (target, bitsize, bitpos, mode, temp);
5846 /* Now build a reference to just the desired component. */
5847 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5849 if (to_rtx == target)
5850 to_rtx = copy_rtx (to_rtx);
5852 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5853 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5854 set_mem_alias_set (to_rtx, alias_set);
5856 return store_expr (exp, to_rtx, 0, nontemporal);
5860 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5861 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5862 codes and find the ultimate containing object, which we return.
5864 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5865 bit position, and *PUNSIGNEDP to the signedness of the field.
5866 If the position of the field is variable, we store a tree
5867 giving the variable offset (in units) in *POFFSET.
5868 This offset is in addition to the bit position.
5869 If the position is not variable, we store 0 in *POFFSET.
5871 If any of the extraction expressions is volatile,
5872 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5874 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5875 Otherwise, it is a mode that can be used to access the field.
5877 If the field describes a variable-sized object, *PMODE is set to
5878 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5879 this case, but the address of the object can be found.
5881 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5882 look through nodes that serve as markers of a greater alignment than
5883 the one that can be deduced from the expression. These nodes make it
5884 possible for front-ends to prevent temporaries from being created by
5885 the middle-end on alignment considerations. For that purpose, the
5886 normal operating mode at high-level is to always pass FALSE so that
5887 the ultimate containing object is really returned; moreover, the
5888 associated predicate handled_component_p will always return TRUE
5889 on these nodes, thus indicating that they are essentially handled
5890 by get_inner_reference. TRUE should only be passed when the caller
5891 is scanning the expression in order to build another representation
5892 and specifically knows how to handle these nodes; as such, this is
5893 the normal operating mode in the RTL expanders. */
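/* E.g. for "s.a.b[i]" applied to "struct { struct { int b[10]; } a; } s",
the object returned is S itself; *PBITPOS accumulates the constant
part of the displacement, and *POFFSET holds the variable part, I
scaled by the element size. */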
5896 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5897 HOST_WIDE_INT *pbitpos, tree *poffset,
5898 enum machine_mode *pmode, int *punsignedp,
5899 int *pvolatilep, bool keep_aligning)
5902 enum machine_mode mode = VOIDmode;
5903 bool blkmode_bitfield = false;
5904 tree offset = size_zero_node;
5905 tree bit_offset = bitsize_zero_node;
5907 /* First get the mode, signedness, and size. We do this from just the
5908 outermost expression. */
5909 if (TREE_CODE (exp) == COMPONENT_REF)
5911 tree field = TREE_OPERAND (exp, 1);
5912 size_tree = DECL_SIZE (field);
5913 if (!DECL_BIT_FIELD (field))
5914 mode = DECL_MODE (field);
5915 else if (DECL_MODE (field) == BLKmode)
5916 blkmode_bitfield = true;
5918 *punsignedp = DECL_UNSIGNED (field);
5920 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5922 size_tree = TREE_OPERAND (exp, 1);
5923 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5924 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5926 /* For vector types, with the correct size of access, use the mode of the inner type. */
5928 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5929 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5930 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5931 mode = TYPE_MODE (TREE_TYPE (exp));
5935 mode = TYPE_MODE (TREE_TYPE (exp));
5936 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5938 if (mode == BLKmode)
5939 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5941 *pbitsize = GET_MODE_BITSIZE (mode);
5946 if (! host_integerp (size_tree, 1))
5947 mode = BLKmode, *pbitsize = -1;
5949 *pbitsize = tree_low_cst (size_tree, 1);
5952 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5953 and find the ultimate containing object. */
5956 switch (TREE_CODE (exp))
5959 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5960 TREE_OPERAND (exp, 2));
5965 tree field = TREE_OPERAND (exp, 1);
5966 tree this_offset = component_ref_field_offset (exp);
5968 /* If this field hasn't been filled in yet, don't go past it.
5969 This should only happen when folding expressions made during
5970 type construction. */
5971 if (this_offset == 0)
5974 offset = size_binop (PLUS_EXPR, offset, this_offset);
5975 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5976 DECL_FIELD_BIT_OFFSET (field));
5978 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5983 case ARRAY_RANGE_REF:
5985 tree index = TREE_OPERAND (exp, 1);
5986 tree low_bound = array_ref_low_bound (exp);
5987 tree unit_size = array_ref_element_size (exp);
5989 /* We assume all arrays have sizes that are a multiple of a byte.
5990 First subtract the lower bound, if any, in the type of the
5991 index, then convert to sizetype and multiply by the size of
5992 the array element. */
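/* E.g. with a [1, N] domain (as for a Fortran array), "a(i)"
contributes (i - 1) * sizeof (element) to OFFSET, the subtraction
done in the index type and the multiplication in sizetype. */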
5993 if (! integer_zerop (low_bound))
5994 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5997 offset = size_binop (PLUS_EXPR, offset,
5998 size_binop (MULT_EXPR,
5999 fold_convert (sizetype, index),
6008 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6009 bitsize_int (*pbitsize));
6012 case VIEW_CONVERT_EXPR:
6013 if (keep_aligning && STRICT_ALIGNMENT
6014 && (TYPE_ALIGN (TREE_TYPE (exp))
6015 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6016 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6017 < BIGGEST_ALIGNMENT)
6018 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6019 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6027 /* If any reference in the chain is volatile, the effect is volatile. */
6028 if (TREE_THIS_VOLATILE (exp))
6031 exp = TREE_OPERAND (exp, 0);
6035 /* If OFFSET is constant, see if we can return the whole thing as a
6036 constant bit position. Make sure to handle overflow during this conversion. */
6038 if (host_integerp (offset, 0))
6040 double_int tem = double_int_mul (tree_to_double_int (offset),
6041 uhwi_to_double_int (BITS_PER_UNIT));
6042 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6043 if (double_int_fits_in_shwi_p (tem))
6045 *pbitpos = double_int_to_shwi (tem);
6046 *poffset = offset = NULL_TREE;
6050 /* Otherwise, split it up. */
6053 *pbitpos = tree_low_cst (bit_offset, 0);
6057 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6058 if (mode == VOIDmode
6060 && (*pbitpos % BITS_PER_UNIT) == 0
6061 && (*pbitsize % BITS_PER_UNIT) == 0)
6069 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6070 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6071 EXP are marked as PACKED. */
6074 contains_packed_reference (const_tree exp)
6076 bool packed_p = false;
6080 switch (TREE_CODE (exp))
6084 tree field = TREE_OPERAND (exp, 1);
6085 packed_p = DECL_PACKED (field)
6086 || TYPE_PACKED (TREE_TYPE (field))
6087 || TYPE_PACKED (TREE_TYPE (exp));
6095 case ARRAY_RANGE_REF:
6098 case VIEW_CONVERT_EXPR:
6104 exp = TREE_OPERAND (exp, 0);
6110 /* Return a tree of sizetype representing the size, in bytes, of the element
6111 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6114 array_ref_element_size (tree exp)
6116 tree aligned_size = TREE_OPERAND (exp, 3);
6117 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6119 /* If a size was specified in the ARRAY_REF, it's the size measured
6120 in alignment units of the element type. So multiply by that value. */
6123 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6124 sizetype from another type of the same width and signedness. */
6125 if (TREE_TYPE (aligned_size) != sizetype)
6126 aligned_size = fold_convert (sizetype, aligned_size);
6127 return size_binop (MULT_EXPR, aligned_size,
6128 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6131 /* Otherwise, take the size from that of the element type. Substitute
6132 any PLACEHOLDER_EXPR that we have. */
6134 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6137 /* Return a tree representing the lower bound of the array mentioned in
6138 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6141 array_ref_low_bound (tree exp)
6143 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6145 /* If a lower bound is specified in EXP, use it. */
6146 if (TREE_OPERAND (exp, 2))
6147 return TREE_OPERAND (exp, 2);
6149 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6150 substituting for a PLACEHOLDER_EXPR as needed. */
6151 if (domain_type && TYPE_MIN_VALUE (domain_type))
6152 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6154 /* Otherwise, return a zero of the appropriate type. */
6155 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6158 /* Return a tree representing the upper bound of the array mentioned in
6159 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6162 array_ref_up_bound (tree exp)
6164 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6166 /* If there is a domain type and it has an upper bound, use it, substituting
6167 for a PLACEHOLDER_EXPR as needed. */
6168 if (domain_type && TYPE_MAX_VALUE (domain_type))
6169 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6171 /* Otherwise fail. */
6175 /* Return a tree representing the offset, in bytes, of the field referenced
6176 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6179 component_ref_field_offset (tree exp)
6181 tree aligned_offset = TREE_OPERAND (exp, 2);
6182 tree field = TREE_OPERAND (exp, 1);
6184 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6185 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that value. */
6189 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6190 sizetype from another type of the same width and signedness. */
6191 if (TREE_TYPE (aligned_offset) != sizetype)
6192 aligned_offset = fold_convert (sizetype, aligned_offset);
6193 return size_binop (MULT_EXPR, aligned_offset,
6194 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6197 /* Otherwise, take the offset from that of the field. Substitute
6198 any PLACEHOLDER_EXPR that we have. */
6200 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6203 /* Return 1 if T is an expression that get_inner_reference handles. */
6206 handled_component_p (const_tree t)
6208 switch (TREE_CODE (t))
6213 case ARRAY_RANGE_REF:
6214 case VIEW_CONVERT_EXPR:
6224 /* Given an rtx VALUE that may contain additions and multiplications, return
6225 an equivalent value that just refers to a register, memory, or constant.
6226 This is done by generating instructions to perform the arithmetic and
6227 returning a pseudo-register containing the value.
6229 The returned value may be a REG, SUBREG, MEM or constant. */
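/* E.g. applied to (plus:SI (reg:SI 60) (const_int 4)), an add insn is
emitted and the register holding the sum (TARGET itself, when it is
usable) is returned. */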
6232 force_operand (rtx value, rtx target)
6235 /* Use subtarget as the target for operand 0 of a binary operation. */
6236 rtx subtarget = get_subtarget (target);
6237 enum rtx_code code = GET_CODE (value);
6239 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6241 && !REG_P (SUBREG_REG (value))
6242 && !MEM_P (SUBREG_REG (value)))
6245 = simplify_gen_subreg (GET_MODE (value),
6246 force_reg (GET_MODE (SUBREG_REG (value)),
6247 force_operand (SUBREG_REG (value),
6249 GET_MODE (SUBREG_REG (value)),
6250 SUBREG_BYTE (value));
6251 code = GET_CODE (value);
6254 /* Check for a PIC address load. */
6255 if ((code == PLUS || code == MINUS)
6256 && XEXP (value, 0) == pic_offset_table_rtx
6257 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6258 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6259 || GET_CODE (XEXP (value, 1)) == CONST))
6262 subtarget = gen_reg_rtx (GET_MODE (value));
6263 emit_move_insn (subtarget, value);
6267 if (ARITHMETIC_P (value))
6269 op2 = XEXP (value, 1);
6270 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6272 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6275 op2 = negate_rtx (GET_MODE (value), op2);
6278 /* Check for an addition with OP2 a constant integer and our first
6279 operand a PLUS of a virtual register and something else. In that
6280 case, we want to emit the sum of the virtual register and the
6281 constant first and then add the other value. This allows virtual
6282 register instantiation to simply modify the constant rather than
6283 creating another one around this addition. */
6284 if (code == PLUS && GET_CODE (op2) == CONST_INT
6285 && GET_CODE (XEXP (value, 0)) == PLUS
6286 && REG_P (XEXP (XEXP (value, 0), 0))
6287 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6288 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6290 rtx temp = expand_simple_binop (GET_MODE (value), code,
6291 XEXP (XEXP (value, 0), 0), op2,
6292 subtarget, 0, OPTAB_LIB_WIDEN);
6293 return expand_simple_binop (GET_MODE (value), code, temp,
6294 force_operand (XEXP (XEXP (value,
6296 target, 0, OPTAB_LIB_WIDEN);
6299 op1 = force_operand (XEXP (value, 0), subtarget);
6300 op2 = force_operand (op2, NULL_RTX);
6304 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6306 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6307 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6308 target, 1, OPTAB_LIB_WIDEN);
6310 return expand_divmod (0,
6311 FLOAT_MODE_P (GET_MODE (value))
6312 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6313 GET_MODE (value), op1, op2, target, 0);
6315 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6318 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6321 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6324 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6325 target, 0, OPTAB_LIB_WIDEN);
6327 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6328 target, 1, OPTAB_LIB_WIDEN);
6331 if (UNARY_P (value))
6334 target = gen_reg_rtx (GET_MODE (value));
6335 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6342 case FLOAT_TRUNCATE:
6343 convert_move (target, op1, code == ZERO_EXTEND);
6348 expand_fix (target, op1, code == UNSIGNED_FIX);
6352 case UNSIGNED_FLOAT:
6353 expand_float (target, op1, code == UNSIGNED_FLOAT);
6357 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6361 #ifdef INSN_SCHEDULING
6362 /* On machines that have insn scheduling, we want all memory references to be
6363 explicit, so we need to deal with such paradoxical SUBREGs. */
6364 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6365 && (GET_MODE_SIZE (GET_MODE (value))
6366 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6368 = simplify_gen_subreg (GET_MODE (value),
6369 force_reg (GET_MODE (SUBREG_REG (value)),
6370 force_operand (SUBREG_REG (value),
6372 GET_MODE (SUBREG_REG (value)),
6373 SUBREG_BYTE (value));
6379 /* Subroutine of expand_expr: return nonzero iff there is no way that
6380 EXP can reference X, which is being modified. TOP_P is nonzero if this
6381 call is going to be used to determine whether we need a temporary
6382 for EXP, as opposed to a recursive call to this function.
6384 It is always safe for this routine to return zero since it merely
6385 searches for optimization opportunities. */
6388 safe_from_p (const_rtx x, tree exp, int top_p)
6394 /* If EXP has varying size, we MUST use a target since we currently
6395 have no way of allocating temporaries of variable size
6396 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6397 So we assume here that something at a higher level has prevented a
6398 clash. This is somewhat bogus, but the best we can do. Only
6399 do this when X is BLKmode and when we are at the top level. */
6400 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6401 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6402 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6403 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6404 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6406 && GET_MODE (x) == BLKmode)
6407 /* If X is in the outgoing argument area, it is always safe. */
6409 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6410 || (GET_CODE (XEXP (x, 0)) == PLUS
6411 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6414 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
6415 find the underlying pseudo. */
6416 if (GET_CODE (x) == SUBREG)
6419 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6423 /* Now look at our tree code and possibly recurse. */
6424 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6426 case tcc_declaration:
6427 exp_rtl = DECL_RTL_IF_SET (exp);
6433 case tcc_exceptional:
6434 if (TREE_CODE (exp) == TREE_LIST)
6438 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6440 exp = TREE_CHAIN (exp);
6443 if (TREE_CODE (exp) != TREE_LIST)
6444 return safe_from_p (x, exp, 0);
6447 else if (TREE_CODE (exp) == CONSTRUCTOR)
6449 constructor_elt *ce;
6450 unsigned HOST_WIDE_INT idx;
6453 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6455 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6456 || !safe_from_p (x, ce->value, 0))
6460 else if (TREE_CODE (exp) == ERROR_MARK)
6461 return 1; /* An already-visited SAVE_EXPR? */
6466 /* The only case we look at here is the DECL_INITIAL inside a
6467 DECL_EXPR. */
6468 return (TREE_CODE (exp) != DECL_EXPR
6469 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6470 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6471 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6474 case tcc_comparison:
6475 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6480 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6482 case tcc_expression:
6485 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6486 the expression. If it is set, we conflict iff we are that rtx or
6487 both are in memory. Otherwise, we check all operands of the
6488 expression recursively. */
6490 switch (TREE_CODE (exp))
6493 /* If the operand is static or we are static, we can't conflict.
6494 Likewise if we don't conflict with the operand at all. */
6495 if (staticp (TREE_OPERAND (exp, 0))
6496 || TREE_STATIC (exp)
6497 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6500 /* Otherwise, the only way this can conflict is if we are taking
6501 the address of a DECL whose address is part of X, which is
6502 very rare. */
6503 exp = TREE_OPERAND (exp, 0);
6506 if (!DECL_RTL_SET_P (exp)
6507 || !MEM_P (DECL_RTL (exp)))
6510 exp_rtl = XEXP (DECL_RTL (exp), 0);
6514 case MISALIGNED_INDIRECT_REF:
6515 case ALIGN_INDIRECT_REF:
6518 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6519 get_alias_set (exp)))
6524 /* Assume that the call will clobber all hard registers and
6525 all of memory. */
6526 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6531 case WITH_CLEANUP_EXPR:
6532 case CLEANUP_POINT_EXPR:
6533 /* Lowered by gimplify.c. */
6537 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6543 /* If we have an rtx, we do not need to scan our operands. */
6547 nops = TREE_OPERAND_LENGTH (exp);
6548 for (i = 0; i < nops; i++)
6549 if (TREE_OPERAND (exp, i) != 0
6550 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6556 /* Should never get a type here. */
6560 /* If we have an rtl, find any enclosed object. Then see if we conflict
6561 with it. */
6562 if (exp_rtl)
6563 {
6564 if (GET_CODE (exp_rtl) == SUBREG)
6566 exp_rtl = SUBREG_REG (exp_rtl);
6568 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6572 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless
6573 both are in memory and they conflict. */
6574 return ! (rtx_equal_p (x, exp_rtl)
6575 || (MEM_P (x) && MEM_P (exp_rtl)
6576 && true_dependence (exp_rtl, VOIDmode, x,
6577 rtx_addr_varies_p)));
6580 /* If we reach here, it is safe. */
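
/* Illustrative example, not from the original source: when expanding
     x = x + f ();
   with X the MEM holding the variable x, safe_from_p returns 0 for
   the CALL_EXPR operand because a call is assumed to clobber all of
   memory, so the expander computes the sum into a temporary before
   storing it into x.  */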
6585 /* Return the highest power of two that EXP is known to be a multiple of.
6586 This is used in updating alignment of MEMs in array references. */
6588 unsigned HOST_WIDE_INT
6589 highest_pow2_factor (const_tree exp)
6591 unsigned HOST_WIDE_INT c0, c1;
6593 switch (TREE_CODE (exp))
6596 /* We can find the lowest bit that's a one. If the low
6597 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6598 We need to handle this case since we can find it in a COND_EXPR,
6599 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6600 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6601 later ICE. */
6602 if (TREE_OVERFLOW (exp))
6603 return BIGGEST_ALIGNMENT;
6606 /* Note: tree_low_cst is intentionally not used here,
6607 as we don't care about the upper bits. */
6608 c0 = TREE_INT_CST_LOW (exp);
6610 return c0 ? c0 : BIGGEST_ALIGNMENT;
6614 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6615 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6616 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6617 return MIN (c0, c1);
6620 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6621 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6624 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6626 if (integer_pow2p (TREE_OPERAND (exp, 1))
6627 && host_integerp (TREE_OPERAND (exp, 1), 1))
6629 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6630 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6631 return MAX (1, c0 / c1);
6636 /* The highest power of two of a bit-and expression is the maximum of
6637 that of its operands. We typically get here for a complex LHS and
6638 a constant negative power of two on the RHS to force an explicit
6639 alignment, so don't bother looking at the LHS. */
6640 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6644 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6647 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6650 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6651 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6652 return MIN (c0, c1);
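
/* Illustrative examples, not from the original source:
     highest_pow2_factor (48)     == 16  (lowest set bit of the constant)
     highest_pow2_factor (i * 4)  == 4   (a bare variable contributes 1)
     highest_pow2_factor (a + b)  == MIN of the operands' factors.
   The result feeds the alignment recorded on MEMs built for array and
   pointer arithmetic.  */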
6661 /* Similar, except that the alignment requirements of TARGET are
6662 taken into account. Assume it is at least as aligned as its
6663 type, unless it is a COMPONENT_REF in which case the layout of
6664 the structure gives the alignment. */
6666 static unsigned HOST_WIDE_INT
6667 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6669 unsigned HOST_WIDE_INT target_align, factor;
6671 factor = highest_pow2_factor (exp);
6672 if (TREE_CODE (target) == COMPONENT_REF)
6673 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6675 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6676 return MAX (factor, target_align);
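
/* Illustrative example, not from the original source: with a "double"
   as TARGET, the result is at least TYPE_ALIGN_UNIT (double), usually
   8, even when EXP alone would only justify a smaller power of two.  */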
6679 /* Return &VAR expression for emulated thread local VAR. */
6682 emutls_var_address (tree var)
6684 tree emuvar = emutls_decl (var);
6685 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6686 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6687 tree arglist = build_tree_list (NULL_TREE, arg);
6688 tree call = build_function_call_expr (fn, arglist);
6689 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
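
/* Illustrative example, not from the original source: for
     __thread int t;
   on a target without native TLS support, &t is rewritten roughly as
     (int *) __emutls_get_address (&__emutls_v.t)
   where __emutls_v.t is the control object created by emutls_decl.  */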
6693 /* Subroutine of expand_expr. Expand the two operands of a binary
6694 expression EXP0 and EXP1, placing the results in OP0 and OP1.
6695 The value may be stored in TARGET if TARGET is nonzero. The
6696 MODIFIER argument is as documented by expand_expr. */
6699 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6700 enum expand_modifier modifier)
6702 if (! safe_from_p (target, exp1, 1))
6704 if (operand_equal_p (exp0, exp1, 0))
6706 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6707 *op1 = copy_rtx (*op0);
6711 /* If we need to preserve evaluation order, copy exp0 into its own
6712 temporary variable so that it can't be clobbered by exp1. */
6713 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6714 exp0 = save_expr (exp0);
6715 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6716 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
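
/* Illustrative example, not from the original source: expanding "a + a"
   takes the operand_equal_p shortcut above, so the subexpression is
   expanded once and *OP1 is merely a copy_rtx of *OP0, avoiding a
   second, redundant expansion.  */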
6721 /* Return a MEM that contains constant EXP. DEFER is as for
6722 output_constant_def and MODIFIER is as for expand_expr. */
6725 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6729 mem = output_constant_def (exp, defer);
6730 if (modifier != EXPAND_INITIALIZER)
6731 mem = use_anchored_address (mem);
6735 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6736 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6739 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6740 enum expand_modifier modifier)
6742 rtx result, subtarget;
6744 HOST_WIDE_INT bitsize, bitpos;
6745 int volatilep, unsignedp;
6746 enum machine_mode mode1;
6748 /* If we are taking the address of a constant and are at the top level,
6749 we have to use output_constant_def since we can't call force_const_mem
6750 yet. */
6751 /* ??? This should be considered a front-end bug. We should not be
6752 generating ADDR_EXPR of something that isn't an LVALUE. The only
6753 exception here is STRING_CST. */
6754 if (CONSTANT_CLASS_P (exp))
6755 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6757 /* Everything must be something allowed by is_gimple_addressable. */
6758 switch (TREE_CODE (exp))
6761 /* This case will happen via recursion for &a->b. */
6762 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6765 /* Recurse and make the output_constant_def clause above handle this. */
6766 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6770 /* The real part of the complex number is always first; therefore
6771 the address is the same as the address of the parent object. */
6774 inner = TREE_OPERAND (exp, 0);
6778 /* The imaginary part of the complex number is always second.
6779 The expression is therefore always offset by the size of the
6780 mode. */
6782 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6783 inner = TREE_OPERAND (exp, 0);
6787 /* TLS emulation hook - replace __thread VAR's &VAR with
6788 __emutls_get_address (&_emutls.VAR). */
6789 if (! targetm.have_tls
6790 && TREE_CODE (exp) == VAR_DECL
6791 && DECL_THREAD_LOCAL_P (exp))
6793 exp = emutls_var_address (exp);
6794 return expand_expr (exp, target, tmode, modifier);
6799 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6800 expand_expr, as that can have various side effects; LABEL_DECLs,
6801 for example, may not have their DECL_RTL set yet. Expand the rtl of
6802 CONSTRUCTORs too, which should yield a memory reference for the
6803 constructor's contents. Assume language specific tree nodes can
6804 be expanded in some interesting way. */
6805 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6807 || TREE_CODE (exp) == CONSTRUCTOR
6808 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6810 result = expand_expr (exp, target, tmode,
6811 modifier == EXPAND_INITIALIZER
6812 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6814 /* If the DECL isn't in memory, then the DECL wasn't properly
6815 marked TREE_ADDRESSABLE, which will be either a front-end
6816 or a tree optimizer bug. */
6817 gcc_assert (MEM_P (result));
6818 result = XEXP (result, 0);
6820 /* ??? Is this needed anymore? */
6821 if (DECL_P (exp) && ! TREE_USED (exp))
6823 assemble_external (exp);
6824 TREE_USED (exp) = 1;
6827 if (modifier != EXPAND_INITIALIZER
6828 && modifier != EXPAND_CONST_ADDRESS)
6829 result = force_operand (result, target);
6833 /* Pass FALSE as the last argument to get_inner_reference although
6834 we are expanding to RTL. The rationale is that we know how to
6835 handle "aligning nodes" here: we can just bypass them because
6836 they won't change the final object whose address will be returned
6837 (they actually exist only for that purpose). */
6838 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6839 &mode1, &unsignedp, &volatilep, false);
6843 /* We must have made progress. */
6844 gcc_assert (inner != exp);
6846 subtarget = offset || bitpos ? NULL_RTX : target;
6847 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6848 inner alignment, force the inner to be sufficiently aligned. */
6849 if (CONSTANT_CLASS_P (inner)
6850 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6852 inner = copy_node (inner);
6853 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6854 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6855 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6857 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6863 if (modifier != EXPAND_NORMAL)
6864 result = force_operand (result, NULL);
6865 tmp = expand_expr (offset, NULL_RTX, tmode,
6866 modifier == EXPAND_INITIALIZER
6867 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6869 result = convert_memory_address (tmode, result);
6870 tmp = convert_memory_address (tmode, tmp);
6872 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6873 result = gen_rtx_PLUS (tmode, result, tmp);
6876 subtarget = bitpos ? NULL_RTX : target;
6877 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6878 1, OPTAB_LIB_WIDEN);
6884 /* Someone beforehand should have rejected taking the address
6885 of such an object. */
6886 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6888 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6889 if (modifier < EXPAND_SUM)
6890 result = force_operand (result, target);
6896 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6897 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6900 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6901 enum expand_modifier modifier)
6903 enum machine_mode rmode;
6906 /* Target mode of VOIDmode says "whatever's natural". */
6907 if (tmode == VOIDmode)
6908 tmode = TYPE_MODE (TREE_TYPE (exp));
6910 /* We can get called with some Weird Things if the user does silliness
6911 like "(short) &a". In that case, convert_memory_address won't do
6912 the right thing, so ignore the given target mode. */
6913 if (tmode != Pmode && tmode != ptr_mode)
6916 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6919 /* Despite expand_expr's claims about ignoring TMODE when not
6920 strictly convenient, things break if we don't honor it. Note
6921 that combined with the above, we only do this for pointer modes. */
6922 rmode = GET_MODE (result);
6923 if (rmode == VOIDmode)
6926 result = convert_memory_address (tmode, result);
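
/* Illustrative example, not from the original source: for the cast
     short s = (short) &a;
   the requested TMODE is HImode, which is neither Pmode nor ptr_mode,
   so the code above ignores it, computes the address in its natural
   pointer mode, and leaves the truncation to the enclosing
   conversion.  */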
6931 /* Generate code for computing CONSTRUCTOR EXP.
6932 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6933 is TRUE, instead of creating a temporary variable in memory,
6934 NULL is returned and the caller needs to handle it differently. */
6937 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6938 bool avoid_temp_mem)
6940 tree type = TREE_TYPE (exp);
6941 enum machine_mode mode = TYPE_MODE (type);
6943 /* Try to avoid creating a temporary at all. This is possible
6944 if all of the initializer is zero.
6945 FIXME: try to handle all [0..255] initializers we can handle
6946 with memset. */
6947 if (TREE_STATIC (exp)
6948 && !TREE_ADDRESSABLE (exp)
6949 && target != 0 && mode == BLKmode
6950 && all_zeros_p (exp))
6952 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6956 /* All elts simple constants => refer to a constant in memory. But
6957 if this is a non-BLKmode mode, let it store a field at a time
6958 since that should make a CONST_INT or CONST_DOUBLE when we
6959 fold. Likewise, if we have a target we can use, it is best to
6960 store directly into the target unless the type is large enough
6961 that memcpy will be used. If we are making an initializer and
6962 all operands are constant, put it in memory as well.
6964 FIXME: Avoid trying to fill vector constructors piecemeal.
6965 Output them with output_constant_def below unless we're sure
6966 they're zeros. This should go away when vector initializers
6967 are treated like VECTOR_CST instead of arrays. */
6968 if ((TREE_STATIC (exp)
6969 && ((mode == BLKmode
6970 && ! (target != 0 && safe_from_p (target, exp, 1)))
6971 || TREE_ADDRESSABLE (exp)
6972 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6973 && (! MOVE_BY_PIECES_P
6974 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6976 && ! mostly_zeros_p (exp))))
6977 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6978 && TREE_CONSTANT (exp)))
6985 constructor = expand_expr_constant (exp, 1, modifier);
6987 if (modifier != EXPAND_CONST_ADDRESS
6988 && modifier != EXPAND_INITIALIZER
6989 && modifier != EXPAND_SUM)
6990 constructor = validize_mem (constructor);
6995 /* Handle calls that pass values in multiple non-contiguous
6996 locations. The Irix 6 ABI has examples of this. */
6997 if (target == 0 || ! safe_from_p (target, exp, 1)
6998 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7004 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7005 | (TREE_READONLY (exp)
7006 * TYPE_QUAL_CONST))),
7007 0, TREE_ADDRESSABLE (exp), 1);
7010 store_constructor (exp, target, 0, int_expr_size (exp));
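
/* Illustrative example, not from the original source: an aggregate
   initialization such as
     struct s x = { 0 };
   with a BLKmode target and an all-zero constructor takes the
   clear_storage shortcut above, emitting a block clear instead of
   building the value in a temporary and copying it.  */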
7015 /* expand_expr: generate code for computing expression EXP.
7016 An rtx for the computed value is returned. The value is never null.
7017 In the case of a void EXP, const0_rtx is returned.
7019 The value may be stored in TARGET if TARGET is nonzero.
7020 TARGET is just a suggestion; callers must assume that
7021 the rtx returned may not be the same as TARGET.
7023 If TARGET is CONST0_RTX, it means that the value will be ignored.
7025 If TMODE is not VOIDmode, it suggests generating the
7026 result in mode TMODE. But this is done only when convenient.
7027 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7028 TMODE is just a suggestion; callers must assume that
7029 the rtx returned may not have mode TMODE.
7031 Note that TARGET may have neither TMODE nor MODE. In that case, it
7032 probably will not be used.
7034 If MODIFIER is EXPAND_SUM then when EXP is an addition
7035 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7036 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7037 products as above, or REG or MEM, or constant.
7038 Ordinarily in such cases we would output mul or add instructions
7039 and then return a pseudo reg containing the sum.
7041 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7042 it also marks a label as absolutely required (it can't be dead).
7043 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7044 This is used for outputting expressions used in initializers.
7046 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7047 with a constant address even if that address is not normally legitimate.
7048 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7050 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7051 a call parameter. Such targets require special care as we haven't yet
7052 marked TARGET so that it's safe from being trashed by libcalls. We
7053 don't want to use TARGET for anything but the final result;
7054 intermediate values must go elsewhere. Additionally, calls to
7055 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7057 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7058 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7059 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7060 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7061 recursively. */
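
/* Illustrative example, not from the original source: expanding
   "p + 12" with EXPAND_SUM may simply return
     (plus (reg) (const_int 12))
   without emitting an add insn, so the caller can fold the constant
   into an address; with EXPAND_NORMAL the addition is emitted and a
   pseudo holding the sum is returned.  */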
7063 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7064 enum expand_modifier, rtx *);
7067 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7068 enum expand_modifier modifier, rtx *alt_rtl)
7071 rtx ret, last = NULL;
7073 /* Handle ERROR_MARK before anybody tries to access its type. */
7074 if (TREE_CODE (exp) == ERROR_MARK
7075 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7077 ret = CONST0_RTX (tmode);
7078 return ret ? ret : const0_rtx;
7081 if (flag_non_call_exceptions)
7083 rn = lookup_expr_eh_region (exp);
7085 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7087 last = get_last_insn ();
7090 /* If this is an expression of some kind and it has an associated line
7091 number, then emit the line number before expanding the expression.
7093 We need to save and restore the file and line information so that
7094 errors discovered during expansion are emitted with the right
7095 information. It would be better if the diagnostic routines
7096 used the file/line information embedded in the tree nodes rather
7097 than globals. */
7098 if (cfun && EXPR_HAS_LOCATION (exp))
7100 location_t saved_location = input_location;
7101 input_location = EXPR_LOCATION (exp);
7102 set_curr_insn_source_location (input_location);
7104 /* Record where the insns produced belong. */
7105 set_curr_insn_block (TREE_BLOCK (exp));
7107 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7109 input_location = saved_location;
7113 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7116 /* If using non-call exceptions, mark all insns that may trap.
7117 expand_call() will mark CALL_INSNs before we get to this code,
7118 but it doesn't handle libcalls, and these may trap. */
7122 for (insn = next_real_insn (last); insn;
7123 insn = next_real_insn (insn))
7125 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7126 /* If we want exceptions for non-call insns, any
7127 may_trap_p instruction may throw. */
7128 && GET_CODE (PATTERN (insn)) != CLOBBER
7129 && GET_CODE (PATTERN (insn)) != USE
7130 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7131 add_reg_note (insn, REG_EH_REGION, GEN_INT (rn));
7139 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7140 enum expand_modifier modifier, rtx *alt_rtl)
7142 rtx op0, op1, op2, temp, decl_rtl;
7145 enum machine_mode mode;
7146 enum tree_code code = TREE_CODE (exp);
7148 rtx subtarget, original_target;
7150 tree context, subexp0, subexp1;
7151 bool reduce_bit_field;
7152 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7153 ? reduce_to_bit_field_precision ((expr), \
7154 target, \
7155 type) \
7156 : (expr))
7158 type = TREE_TYPE (exp);
7159 mode = TYPE_MODE (type);
7160 unsignedp = TYPE_UNSIGNED (type);
7162 ignore = (target == const0_rtx
7163 || ((CONVERT_EXPR_CODE_P (code)
7164 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7165 && TREE_CODE (type) == VOID_TYPE));
7167 /* An operation in what may be a bit-field type needs the
7168 result to be reduced to the precision of the bit-field type,
7169 which is narrower than that of the type's mode. */
7170 reduce_bit_field = (!ignore
7171 && TREE_CODE (type) == INTEGER_TYPE
7172 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7174 /* If we are going to ignore this result, we need only do something
7175 if there is a side-effect somewhere in the expression. If there
7176 is, short-circuit the most common cases here. Note that we must
7177 not call expand_expr with anything but const0_rtx in case this
7178 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7182 if (! TREE_SIDE_EFFECTS (exp))
7185 /* Ensure we reference a volatile object even if value is ignored, but
7186 don't do this if all we are doing is taking its address. */
7187 if (TREE_THIS_VOLATILE (exp)
7188 && TREE_CODE (exp) != FUNCTION_DECL
7189 && mode != VOIDmode && mode != BLKmode
7190 && modifier != EXPAND_CONST_ADDRESS)
7192 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7194 temp = copy_to_reg (temp);
7198 if (TREE_CODE_CLASS (code) == tcc_unary
7199 || code == COMPONENT_REF || code == INDIRECT_REF)
7200 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7203 else if (TREE_CODE_CLASS (code) == tcc_binary
7204 || TREE_CODE_CLASS (code) == tcc_comparison
7205 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7207 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7208 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7211 else if (code == BIT_FIELD_REF)
7213 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7214 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7215 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7222 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7225 /* Use subtarget as the target for operand 0 of a binary operation. */
7226 subtarget = get_subtarget (target);
7227 original_target = target;
7233 tree function = decl_function_context (exp);
7235 temp = label_rtx (exp);
7236 temp = gen_rtx_LABEL_REF (Pmode, temp);
7238 if (function != current_function_decl
7240 LABEL_REF_NONLOCAL_P (temp) = 1;
7242 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7247 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7252 /* If a static var's type was incomplete when the decl was written,
7253 but the type is complete now, lay out the decl now. */
7254 if (DECL_SIZE (exp) == 0
7255 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7256 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7257 layout_decl (exp, 0);
7259 /* TLS emulation hook - replace __thread vars with
7260 *__emutls_get_address (&_emutls.var). */
7261 if (! targetm.have_tls
7262 && TREE_CODE (exp) == VAR_DECL
7263 && DECL_THREAD_LOCAL_P (exp))
7265 exp = build_fold_indirect_ref (emutls_var_address (exp));
7266 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7269 /* ... fall through ... */
7273 decl_rtl = DECL_RTL (exp);
7274 gcc_assert (decl_rtl);
7275 decl_rtl = copy_rtx (decl_rtl);
7277 /* Ensure variable marked as used even if it doesn't go through
7278 a parser. If it hasn't been used yet, write out an external
7279 definition. */
7280 if (! TREE_USED (exp))
7282 assemble_external (exp);
7283 TREE_USED (exp) = 1;
7286 /* Show we haven't gotten RTL for this yet. */
7289 /* Variables inherited from containing functions should have
7290 been lowered by this point. */
7291 context = decl_function_context (exp);
7292 gcc_assert (!context
7293 || context == current_function_decl
7294 || TREE_STATIC (exp)
7295 /* ??? C++ creates functions that are not TREE_STATIC. */
7296 || TREE_CODE (exp) == FUNCTION_DECL);
7298 /* This is the case of an array whose size is to be determined
7299 from its initializer, while the initializer is still being parsed.
7302 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7303 temp = validize_mem (decl_rtl);
7305 /* If DECL_RTL is memory, we are in the normal case and the
7306 address is not valid, get the address into a register. */
7308 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7311 *alt_rtl = decl_rtl;
7312 decl_rtl = use_anchored_address (decl_rtl);
7313 if (modifier != EXPAND_CONST_ADDRESS
7314 && modifier != EXPAND_SUM
7315 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
7316 temp = replace_equiv_address (decl_rtl,
7317 copy_rtx (XEXP (decl_rtl, 0)));
7320 /* If we got something, return it. But first, set the alignment
7321 if the address is a register. */
7324 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7325 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7330 /* If the mode of DECL_RTL does not match that of the decl, it
7331 must be a promoted value. We return a SUBREG of the wanted mode,
7332 but mark it so that we know that it was already extended. */
7334 if (REG_P (decl_rtl)
7335 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7337 enum machine_mode pmode;
7339 /* Get the signedness used for this variable. Ensure we get the
7340 same mode we got when the variable was declared. */
7341 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7342 (TREE_CODE (exp) == RESULT_DECL
7343 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7344 gcc_assert (GET_MODE (decl_rtl) == pmode);
7346 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7347 SUBREG_PROMOTED_VAR_P (temp) = 1;
7348 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7355 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7356 TREE_INT_CST_HIGH (exp), mode);
7362 tree tmp = NULL_TREE;
7363 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7364 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7365 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7366 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7367 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7368 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7369 return const_vector_from_tree (exp);
7370 if (GET_MODE_CLASS (mode) == MODE_INT)
7372 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7374 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7377 tmp = build_constructor_from_list (type,
7378 TREE_VECTOR_CST_ELTS (exp));
7379 return expand_expr (tmp, ignore ? const0_rtx : target,
7384 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7387 /* If optimized, generate immediate CONST_DOUBLE
7388 which will be turned into memory by reload if necessary.
7390 We used to force a register so that loop.c could see it. But
7391 this does not allow gen_* patterns to perform optimizations with
7392 the constants. It also produces two insns in cases like "x = 1.0;".
7393 On most machines, floating-point constants are not permitted in
7394 many insns, so we'd end up copying it to a register in any case.
7396 Now, we do the copying in expand_binop, if appropriate. */
7397 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7398 TYPE_MODE (TREE_TYPE (exp)));
7401 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7402 TYPE_MODE (TREE_TYPE (exp)));
7405 /* Handle evaluating a complex constant in a CONCAT target. */
7406 if (original_target && GET_CODE (original_target) == CONCAT)
7408 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7411 rtarg = XEXP (original_target, 0);
7412 itarg = XEXP (original_target, 1);
7414 /* Move the real and imaginary parts separately. */
7415 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7416 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7419 emit_move_insn (rtarg, op0);
7421 emit_move_insn (itarg, op1);
7423 return original_target;
7426 /* ... fall through ... */
7429 temp = expand_expr_constant (exp, 1, modifier);
7431 /* temp contains a constant address.
7432 On RISC machines where a constant address isn't valid,
7433 make some insns to get that address into a register. */
7434 if (modifier != EXPAND_CONST_ADDRESS
7435 && modifier != EXPAND_INITIALIZER
7436 && modifier != EXPAND_SUM
7437 && ! memory_address_p (mode, XEXP (temp, 0)))
7438 return replace_equiv_address (temp,
7439 copy_rtx (XEXP (temp, 0)));
7444 tree val = TREE_OPERAND (exp, 0);
7445 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7447 if (!SAVE_EXPR_RESOLVED_P (exp))
7449 /* We can indeed still hit this case, typically via builtin
7450 expanders calling save_expr immediately before expanding
7451 something. Assume this means that we only have to deal
7452 with non-BLKmode values. */
7453 gcc_assert (GET_MODE (ret) != BLKmode);
7455 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7456 DECL_ARTIFICIAL (val) = 1;
7457 DECL_IGNORED_P (val) = 1;
7458 TREE_OPERAND (exp, 0) = val;
7459 SAVE_EXPR_RESOLVED_P (exp) = 1;
7461 if (!CONSTANT_P (ret))
7462 ret = copy_to_reg (ret);
7463 SET_DECL_RTL (val, ret);
7470 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7471 expand_goto (TREE_OPERAND (exp, 0));
7473 expand_computed_goto (TREE_OPERAND (exp, 0));
7477 /* If we don't need the result, just ensure we evaluate any
7478 subexpressions. */
7479 if (ignore)
7480 {
7481 unsigned HOST_WIDE_INT idx;
7484 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7485 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7490 return expand_constructor (exp, target, modifier, false);
7492 case MISALIGNED_INDIRECT_REF:
7493 case ALIGN_INDIRECT_REF:
7496 tree exp1 = TREE_OPERAND (exp, 0);
7498 if (modifier != EXPAND_WRITE)
7502 t = fold_read_from_constant_string (exp);
7504 return expand_expr (t, target, tmode, modifier);
7507 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7508 op0 = memory_address (mode, op0);
7510 if (code == ALIGN_INDIRECT_REF)
7512 int align = TYPE_ALIGN_UNIT (type);
7513 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7514 op0 = memory_address (mode, op0);
7517 temp = gen_rtx_MEM (mode, op0);
7519 set_mem_attributes (temp, exp, 0);
7521 /* Resolve the misalignment now, so that we don't have to remember
7522 to resolve it later. Of course, this only works for reads. */
7523 /* ??? When we get around to supporting writes, we'll have to handle
7524 this in store_expr directly. The vectorizer isn't generating
7525 those yet, however. */
7526 if (code == MISALIGNED_INDIRECT_REF)
7531 gcc_assert (modifier == EXPAND_NORMAL
7532 || modifier == EXPAND_STACK_PARM);
7534 /* The vectorizer should have already checked the mode. */
7535 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7536 gcc_assert (icode != CODE_FOR_nothing);
7538 /* We've already validated the memory, and we're creating a
7539 new pseudo destination. The predicates really can't fail. */
7540 reg = gen_reg_rtx (mode);
7542 /* Nor can the insn generator. */
7543 insn = GEN_FCN (icode) (reg, temp);
7552 case TARGET_MEM_REF:
7554 struct mem_address addr;
7556 get_address_description (exp, &addr);
7557 op0 = addr_for_mem_ref (&addr, true);
7558 op0 = memory_address (mode, op0);
7559 temp = gen_rtx_MEM (mode, op0);
7560 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7567 tree array = TREE_OPERAND (exp, 0);
7568 tree index = TREE_OPERAND (exp, 1);
7570 /* Fold an expression like: "foo"[2].
7571 This is not done in fold so it won't happen inside &.
7572 Don't fold if this is for wide characters since it's too
7573 difficult to do correctly and this is a very rare case. */
7575 if (modifier != EXPAND_CONST_ADDRESS
7576 && modifier != EXPAND_INITIALIZER
7577 && modifier != EXPAND_MEMORY)
7579 tree t = fold_read_from_constant_string (exp);
7582 return expand_expr (t, target, tmode, modifier);
7585 /* If this is a constant index into a constant array,
7586 just get the value from the array. Handle both the cases when
7587 we have an explicit constructor and when our operand is a variable
7588 that was declared const. */
7590 if (modifier != EXPAND_CONST_ADDRESS
7591 && modifier != EXPAND_INITIALIZER
7592 && modifier != EXPAND_MEMORY
7593 && TREE_CODE (array) == CONSTRUCTOR
7594 && ! TREE_SIDE_EFFECTS (array)
7595 && TREE_CODE (index) == INTEGER_CST)
7597 unsigned HOST_WIDE_INT ix;
7600 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7602 if (tree_int_cst_equal (field, index))
7604 if (!TREE_SIDE_EFFECTS (value))
7605 return expand_expr (fold (value), target, tmode, modifier);
7610 else if (optimize >= 1
7611 && modifier != EXPAND_CONST_ADDRESS
7612 && modifier != EXPAND_INITIALIZER
7613 && modifier != EXPAND_MEMORY
7614 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7615 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7616 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7617 && targetm.binds_local_p (array))
7619 if (TREE_CODE (index) == INTEGER_CST)
7621 tree init = DECL_INITIAL (array);
7623 if (TREE_CODE (init) == CONSTRUCTOR)
7625 unsigned HOST_WIDE_INT ix;
7628 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7630 if (tree_int_cst_equal (field, index))
7632 if (TREE_SIDE_EFFECTS (value))
7635 if (TREE_CODE (value) == CONSTRUCTOR)
7637 /* If VALUE is a CONSTRUCTOR, this
7638 optimization is only useful if
7639 this doesn't store the CONSTRUCTOR
7640 into memory. If it does, it is more
7641 efficient to just load the data from
7642 the array directly. */
7643 rtx ret = expand_constructor (value, target,
7645 if (ret == NULL_RTX)
7649 return expand_expr (fold (value), target, tmode,
7653 else if (TREE_CODE (init) == STRING_CST)
7655 tree index1 = index;
7656 tree low_bound = array_ref_low_bound (exp);
7657 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7659 /* Optimize the special-case of a zero lower bound.
7661 We convert the low_bound to sizetype to avoid some problems
7662 with constant folding. (E.g. suppose the lower bound is 1,
7663 and its mode is QI. Without the conversion, (ARRAY
7664 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7665 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7667 if (! integer_zerop (low_bound))
7668 index1 = size_diffop (index1, fold_convert (sizetype,
7671 if (0 > compare_tree_int (index1,
7672 TREE_STRING_LENGTH (init)))
7674 tree type = TREE_TYPE (TREE_TYPE (init));
7675 enum machine_mode mode = TYPE_MODE (type);
7677 if (GET_MODE_CLASS (mode) == MODE_INT
7678 && GET_MODE_SIZE (mode) == 1)
7679 return gen_int_mode (TREE_STRING_POINTER (init)
7680 [TREE_INT_CST_LOW (index1)],
7687 goto normal_inner_ref;
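
/* Illustrative example, not from the original source: with
     static const char msg[] = "hi";
   and optimization enabled, a read of msg[1] falls into the
   STRING_CST path above and is expanded directly to the constant 'i',
   guarded by the bounds check against TREE_STRING_LENGTH.  */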
7690 /* If the operand is a CONSTRUCTOR, we can just extract the
7691 appropriate field if it is present. */
7692 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7694 unsigned HOST_WIDE_INT idx;
7697 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7699 if (field == TREE_OPERAND (exp, 1)
7700 /* We can normally use the value of the field in the
7701 CONSTRUCTOR. However, if this is a bitfield in
7702 an integral mode that we can fit in a HOST_WIDE_INT,
7703 we must mask only the number of bits in the bitfield,
7704 since this is done implicitly by the constructor. If
7705 the bitfield does not meet either of those conditions,
7706 we can't do this optimization. */
7707 && (! DECL_BIT_FIELD (field)
7708 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7709 && (GET_MODE_BITSIZE (DECL_MODE (field))
7710 <= HOST_BITS_PER_WIDE_INT))))
7712 if (DECL_BIT_FIELD (field)
7713 && modifier == EXPAND_STACK_PARM)
7715 op0 = expand_expr (value, target, tmode, modifier);
7716 if (DECL_BIT_FIELD (field))
7718 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7719 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7721 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7723 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7724 op0 = expand_and (imode, op0, op1, target);
7729 = build_int_cst (NULL_TREE,
7730 GET_MODE_BITSIZE (imode) - bitsize);
7732 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7734 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7742 goto normal_inner_ref;
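
/* Illustrative example, not from the original source: extracting the
   field f from the constructor of
     struct { unsigned f : 3; }
   via the path above masks the expanded value with (1 << 3) - 1; for
   a signed bitfield the value is instead shifted left and then
   arithmetic-shifted right by GET_MODE_BITSIZE (imode) - 3 so that it
   is sign-extended.  */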
7745 case ARRAY_RANGE_REF:
7748 enum machine_mode mode1, mode2;
7749 HOST_WIDE_INT bitsize, bitpos;
7751 int volatilep = 0, must_force_mem;
7752 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7753 &mode1, &unsignedp, &volatilep, true);
7754 rtx orig_op0, memloc;
7756 /* If we got back the original object, something is wrong. Perhaps
7757 we are evaluating an expression too early. In any event, don't
7758 infinitely recurse. */
7759 gcc_assert (tem != exp);
7761 /* If TEM's type is a union of variable size, pass TARGET to the inner
7762 computation, since it will need a temporary and TARGET is known
7763 to be suitable. This occurs in unchecked conversion in Ada. */
7766 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7767 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7769 && modifier != EXPAND_STACK_PARM
7770 ? target : NULL_RTX),
7772 (modifier == EXPAND_INITIALIZER
7773 || modifier == EXPAND_CONST_ADDRESS
7774 || modifier == EXPAND_STACK_PARM)
7775 ? modifier : EXPAND_NORMAL);
7778 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
7780 /* If we have either an offset, a BLKmode result, or a reference
7781 outside the underlying object, we must force it to memory.
7782 Such a case can occur in Ada if we have unchecked conversion
7783 of an expression from a scalar type to an aggregate type or
7784 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
7785 passed a partially uninitialized object or a view-conversion
7786 to a larger size. */
7787 must_force_mem = (offset
7789 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
7791 /* If this is a constant, put it in a register if it is a legitimate
7792 constant and we don't need a memory reference. */
7793 if (CONSTANT_P (op0)
7795 && LEGITIMATE_CONSTANT_P (op0)
7797 op0 = force_reg (mode2, op0);
7799 /* Otherwise, if this is a constant, try to force it to the constant
7800 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
7801 is a legitimate constant. */
7802 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
7803 op0 = validize_mem (memloc);
7805 /* Otherwise, if this is a constant or the object is not in memory
7806 and needs to be, put it there. */
7807 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
7809 tree nt = build_qualified_type (TREE_TYPE (tem),
7810 (TYPE_QUALS (TREE_TYPE (tem))
7811 | TYPE_QUAL_CONST));
7812 memloc = assign_temp (nt, 1, 1, 1);
7813 emit_move_insn (memloc, op0);
7819 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7822 gcc_assert (MEM_P (op0));
7824 #ifdef POINTERS_EXTEND_UNSIGNED
7825 if (GET_MODE (offset_rtx) != Pmode)
7826 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7828 if (GET_MODE (offset_rtx) != ptr_mode)
7829 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7832 if (GET_MODE (op0) == BLKmode
7833 /* A constant address in OP0 can have VOIDmode; we must
7834 not try to call force_reg in that case. */
7835 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7837 && (bitpos % bitsize) == 0
7838 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7839 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7841 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7845 op0 = offset_address (op0, offset_rtx,
7846 highest_pow2_factor (offset));
7849 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7850 record its alignment as BIGGEST_ALIGNMENT. */
7851 if (MEM_P (op0) && bitpos == 0 && offset != 0
7852 && is_aligning_offset (offset, tem))
7853 set_mem_align (op0, BIGGEST_ALIGNMENT);
7855 /* Don't forget about volatility even if this is a bitfield. */
7856 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7858 if (op0 == orig_op0)
7859 op0 = copy_rtx (op0);
7861 MEM_VOLATILE_P (op0) = 1;
7864 /* The following code doesn't handle CONCAT.
7865 Assume only bitpos == 0 can be used for CONCAT, due to
7866 one-element arrays having the same mode as their element. */
7867 if (GET_CODE (op0) == CONCAT)
7869 gcc_assert (bitpos == 0
7870 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7874 /* In cases where an aligned union has an unaligned object
7875 as a field, we might be extracting a BLKmode value from
7876 an integer-mode (e.g., SImode) object. Handle this case
7877 by doing the extract into an object as wide as the field
7878 (which we know to be the width of a basic mode), then
7879 storing into memory, and changing the mode to BLKmode. */
7880 if (mode1 == VOIDmode
7881 || REG_P (op0) || GET_CODE (op0) == SUBREG
7882 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7883 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7884 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7885 && modifier != EXPAND_CONST_ADDRESS
7886 && modifier != EXPAND_INITIALIZER)
7887 /* If the field isn't aligned enough to fetch as a memref,
7888 fetch it as a bit field. */
7889 || (mode1 != BLKmode
7890 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7891 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7893 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7894 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7895 && ((modifier == EXPAND_CONST_ADDRESS
7896 || modifier == EXPAND_INITIALIZER)
7898 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7899 || (bitpos % BITS_PER_UNIT != 0)))
7900 /* If the type and the field are a constant size and the
7901 size of the type isn't the same size as the bitfield,
7902 we must use bitfield operations. */
7904 && TYPE_SIZE (TREE_TYPE (exp))
7905 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7906 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7909 enum machine_mode ext_mode = mode;
7911 if (ext_mode == BLKmode
7912 && ! (target != 0 && MEM_P (op0)
7914 && bitpos % BITS_PER_UNIT == 0))
7915 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7917 if (ext_mode == BLKmode)
7920 target = assign_temp (type, 0, 1, 1);
7925 /* In this case, BITPOS must start at a byte boundary and
7926 TARGET, if specified, must be a MEM. */
7927 gcc_assert (MEM_P (op0)
7928 && (!target || MEM_P (target))
7929 && !(bitpos % BITS_PER_UNIT));
7931 emit_block_move (target,
7932 adjust_address (op0, VOIDmode,
7933 bitpos / BITS_PER_UNIT),
7934 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7936 (modifier == EXPAND_STACK_PARM
7937 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7942 op0 = validize_mem (op0);
7944 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7945 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7947 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7948 (modifier == EXPAND_STACK_PARM
7949 ? NULL_RTX : target),
7950 ext_mode, ext_mode);
7952 /* If the result is a record type and BITSIZE is narrower than
7953 the mode of OP0, an integral mode, and this is a big endian
7954 machine, we must put the field into the high-order bits. */
7955 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7956 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7957 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7958 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7959 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7963 /* If the result type is BLKmode, store the data into a temporary
7964 of the appropriate type, but with the mode corresponding to the
7965 mode for the data we have (op0's mode). It's tempting to make
7966 this a constant type, since we know it's only being stored once,
7967 but that can cause problems if we are taking the address of this
7968 COMPONENT_REF because the MEM of any reference via that address
7969 will have flags corresponding to the type, which will not
7970 necessarily be constant. */
7971 if (mode == BLKmode)
7973 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7976 /* If the reference doesn't use the alias set of its type,
7977 we cannot create the temporary using that type. */
7978 if (component_uses_parent_alias_set (exp))
7980 new_rtx = assign_stack_local (ext_mode, size, 0);
7981 set_mem_alias_set (new_rtx, get_alias_set (exp));
7984 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
7986 emit_move_insn (new_rtx, op0);
7987 op0 = copy_rtx (new_rtx);
7988 PUT_MODE (op0, BLKmode);
7989 set_mem_attributes (op0, exp, 1);
7995 /* If the result is BLKmode, use that to access the object
7996 now as well. */
7997 if (mode == BLKmode)
7998 mode1 = BLKmode;
8000 /* Get a reference to just this component. */
8001 if (modifier == EXPAND_CONST_ADDRESS
8002 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8003 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
8005 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
8007 if (op0 == orig_op0)
8008 op0 = copy_rtx (op0);
8010 set_mem_attributes (op0, exp, 0);
8011 if (REG_P (XEXP (op0, 0)))
8012 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8014 MEM_VOLATILE_P (op0) |= volatilep;
8015 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
8016 || modifier == EXPAND_CONST_ADDRESS
8017 || modifier == EXPAND_INITIALIZER)
8019 else if (target == 0)
8020 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8022 convert_move (target, op0, unsignedp);
8027 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8030 /* All valid uses of __builtin_va_arg_pack () are removed during
8031 inlining. */
8032 if (CALL_EXPR_VA_ARG_PACK (exp))
8033 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8035 tree fndecl = get_callee_fndecl (exp), attr;
8038 && (attr = lookup_attribute ("error",
8039 DECL_ATTRIBUTES (fndecl))) != NULL)
8040 error ("%Kcall to %qs declared with attribute error: %s",
8041 exp, lang_hooks.decl_printable_name (fndecl, 1),
8042 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8044 && (attr = lookup_attribute ("warning",
8045 DECL_ATTRIBUTES (fndecl))) != NULL)
8046 warning_at (tree_nonartificial_location (exp),
8047 0, "%Kcall to %qs declared with attribute warning: %s",
8048 exp, lang_hooks.decl_printable_name (fndecl, 1),
8049 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8051 /* Check for a built-in function. */
8052 if (fndecl && DECL_BUILT_IN (fndecl))
8054 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
8055 return expand_builtin (exp, target, subtarget, tmode, ignore);
8058 return expand_call (exp, target, ignore);
8062 if (TREE_OPERAND (exp, 0) == error_mark_node)
8065 if (TREE_CODE (type) == UNION_TYPE)
8067 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8069 /* If both input and output are BLKmode, this conversion isn't doing
8070 anything except possibly changing memory attributes. */
8071 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8073 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8076 result = copy_rtx (result);
8077 set_mem_attributes (result, exp, 0);
8083 if (TYPE_MODE (type) != BLKmode)
8084 target = gen_reg_rtx (TYPE_MODE (type));
8086 target = assign_temp (type, 0, 1, 1);
8090 /* Store data into beginning of memory target. */
8091 store_expr (TREE_OPERAND (exp, 0),
8092 adjust_address (target, TYPE_MODE (valtype), 0),
8093 modifier == EXPAND_STACK_PARM,
8098 gcc_assert (REG_P (target));
8100 /* Store this field into a union of the proper type. */
8101 store_field (target,
8102 MIN ((int_size_in_bytes (TREE_TYPE
8103 (TREE_OPERAND (exp, 0)))
8105 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8106 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8110 /* Return the entire union. */
8114 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8116 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8119 /* If the signedness of the conversion differs and OP0 is
8120 a promoted SUBREG, clear that indication since we now
8121 have to do the proper extension. */
8122 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8123 && GET_CODE (op0) == SUBREG)
8124 SUBREG_PROMOTED_VAR_P (op0) = 0;
8126 return REDUCE_BIT_FIELD (op0);
8129 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8130 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8131 if (GET_MODE (op0) == mode)
8134 /* If OP0 is a constant, just convert it into the proper mode. */
8135 else if (CONSTANT_P (op0))
8137 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8138 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8140 if (modifier == EXPAND_INITIALIZER)
8141 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8142 subreg_lowpart_offset (mode,
8145 op0 = convert_modes (mode, inner_mode, op0,
8146 TYPE_UNSIGNED (inner_type));
8149 else if (modifier == EXPAND_INITIALIZER)
8150 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8152 else if (target == 0)
8153 op0 = convert_to_mode (mode, op0,
8154 TYPE_UNSIGNED (TREE_TYPE
8155 (TREE_OPERAND (exp, 0))));
8158 convert_move (target, op0,
8159 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8163 return REDUCE_BIT_FIELD (op0);
8165 case VIEW_CONVERT_EXPR:
8168 /* If we are converting to BLKmode, try to avoid an intermediate
8169 temporary by fetching an inner memory reference. */
8171 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
8172 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != BLKmode
8173 && handled_component_p (TREE_OPERAND (exp, 0)))
8175 enum machine_mode mode1;
8176 HOST_WIDE_INT bitsize, bitpos;
8181 = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, &bitpos,
8182 &offset, &mode1, &unsignedp, &volatilep,
8186 /* ??? We should work harder and deal with non-zero offsets. */
8188 && (bitpos % BITS_PER_UNIT) == 0
8190 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
8192 /* See the normal_inner_ref case for the rationale. */
8195 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8196 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8198 && modifier != EXPAND_STACK_PARM
8199 ? target : NULL_RTX),
8201 (modifier == EXPAND_INITIALIZER
8202 || modifier == EXPAND_CONST_ADDRESS
8203 || modifier == EXPAND_STACK_PARM)
8204 ? modifier : EXPAND_NORMAL);
8206 if (MEM_P (orig_op0))
8210 /* Get a reference to just this component. */
8211 if (modifier == EXPAND_CONST_ADDRESS
8212 || modifier == EXPAND_SUM
8213 || modifier == EXPAND_INITIALIZER)
8214 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
8216 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
8218 if (op0 == orig_op0)
8219 op0 = copy_rtx (op0);
8221 set_mem_attributes (op0, TREE_OPERAND (exp, 0), 0);
8222 if (REG_P (XEXP (op0, 0)))
8223 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8225 MEM_VOLATILE_P (op0) |= volatilep;
8231 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8233 /* If the input and output modes are both the same, we are done. */
8234 if (mode == GET_MODE (op0))
8235 ;
8236 /* If neither mode is BLKmode, and both modes are the same size
8237 then we can use gen_lowpart. */
8238 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
8239 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0)))
8241 if (GET_CODE (op0) == SUBREG)
8242 op0 = force_reg (GET_MODE (op0), op0);
8243 op0 = gen_lowpart (mode, op0);
8245 /* If both modes are integral, then we can convert from one to the
8246 other. */
8247 else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
8248 op0 = convert_modes (mode, GET_MODE (op0), op0,
8249 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8250 /* As a last resort, spill op0 to memory, and reload it in a
8251 different mode. */
8252 else if (!MEM_P (op0))
8254 /* If the operand is not a MEM, force it into memory. Since we
8255 are going to be changing the mode of the MEM, don't call
8256 force_const_mem for constants because we don't allow pool
8257 constants to change mode. */
8258 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8260 gcc_assert (!TREE_ADDRESSABLE (exp));
8262 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8263 target
8264 = assign_stack_temp_for_type
8265 (TYPE_MODE (inner_type),
8266 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8268 emit_move_insn (target, op0);
8269 op0 = target;
8272 /* At this point, OP0 is in the correct mode. If the output type is
8273 such that the operand is known to be aligned, indicate that it is.
8274 Otherwise, we need only be concerned about alignment for non-BLKmode
8275 results. */
8276 if (MEM_P (op0))
8278 op0 = copy_rtx (op0);
8280 if (TYPE_ALIGN_OK (type))
8281 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8282 else if (STRICT_ALIGNMENT
8283 && mode != BLKmode
8284 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
8286 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8287 HOST_WIDE_INT temp_size
8288 = MAX (int_size_in_bytes (inner_type),
8289 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
8290 rtx new_rtx
8291 = assign_stack_temp_for_type (mode, temp_size, 0, type);
8292 rtx new_with_op0_mode
8293 = adjust_address (new_rtx, GET_MODE (op0), 0);
8295 gcc_assert (!TREE_ADDRESSABLE (exp));
8297 if (GET_MODE (op0) == BLKmode)
8298 emit_block_move (new_with_op0_mode, op0,
8299 GEN_INT (GET_MODE_SIZE (mode)),
8300 (modifier == EXPAND_STACK_PARM
8301 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8302 else
8303 emit_move_insn (new_with_op0_mode, op0);
8305 op0 = new_rtx;
8308 op0 = adjust_address (op0, mode, 0);
8310 return op0;
8313 case POINTER_PLUS_EXPR:
8314 /* Even though the sizetype mode and the pointer's mode can be different
8315 expand is able to handle this correctly and get the correct result out
8316 of the PLUS_EXPR code. */
8317 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8318 if sizetype precision is smaller than pointer precision. */
8319 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8320 exp = build2 (PLUS_EXPR, type,
8321 TREE_OPERAND (exp, 0),
8322 fold_convert (type,
8323 fold_convert (ssizetype,
8324 TREE_OPERAND (exp, 1))));
8326 case PLUS_EXPR:
8327 /* Check if this is a case for multiplication and addition. */
8328 if ((TREE_CODE (type) == INTEGER_TYPE
8329 || TREE_CODE (type) == FIXED_POINT_TYPE)
8330 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8332 tree subsubexp0, subsubexp1;
8333 enum tree_code code0, code1, this_code;
8335 subexp0 = TREE_OPERAND (exp, 0);
8336 subsubexp0 = TREE_OPERAND (subexp0, 0);
8337 subsubexp1 = TREE_OPERAND (subexp0, 1);
8338 code0 = TREE_CODE (subsubexp0);
8339 code1 = TREE_CODE (subsubexp1);
8340 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8341 : FIXED_CONVERT_EXPR;
8342 if (code0 == this_code && code1 == this_code
8343 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8344 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8345 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8346 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8347 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8348 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8350 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8351 enum machine_mode innermode = TYPE_MODE (op0type);
8352 bool zextend_p = TYPE_UNSIGNED (op0type);
8353 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8354 if (sat_p == 0)
8355 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8356 else
8357 this_optab = zextend_p ? usmadd_widen_optab
8358 : ssmadd_widen_optab;
8359 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8360 && (optab_handler (this_optab, mode)->insn_code
8361 != CODE_FOR_nothing))
8363 expand_operands (TREE_OPERAND (subsubexp0, 0),
8364 TREE_OPERAND (subsubexp1, 0),
8365 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8366 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8367 VOIDmode, EXPAND_NORMAL);
8368 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8369 target, unsignedp);
8370 gcc_assert (temp);
8371 return REDUCE_BIT_FIELD (temp);
8376 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8377 something else, make sure we add the register to the constant and
8378 then to the other thing. This case can occur during strength
8379 reduction and doing it this way will produce better code if the
8380 frame pointer or argument pointer is eliminated.
8382 fold-const.c will ensure that the constant is always in the inner
8383 PLUS_EXPR, so the only case we need to do anything about is if
8384 sp, ap, or fp is our second argument, in which case we must swap
8385 the innermost first argument and our second argument. */
8387 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8388 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8389 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8390 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8391 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8392 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8394 tree t = TREE_OPERAND (exp, 1);
8396 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8397 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8400 /* If the result is to be ptr_mode and we are adding an integer to
8401 something, we might be forming a constant. So try to use
8402 plus_constant. If it produces a sum and we can't accept it,
8403 use force_operand. This allows P = &ARR[const] to generate
8404 efficient code on machines where a SYMBOL_REF is not a valid
8407 If this is an EXPAND_SUM call, always return the sum. */
8408 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8409 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8411 if (modifier == EXPAND_STACK_PARM)
8412 target = 0;
8413 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8414 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8415 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8419 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8421 /* Use immed_double_const to ensure that the constant is
8422 truncated according to the mode of OP1, then sign extended
8423 to a HOST_WIDE_INT. Using the constant directly can result
8424 in non-canonical RTL in a 64x32 cross compile. */
8426 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8428 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8429 op1 = plus_constant (op1, INTVAL (constant_part));
8430 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8431 op1 = force_operand (op1, target);
8432 return REDUCE_BIT_FIELD (op1);
8435 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8436 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8437 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8441 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8442 (modifier == EXPAND_INITIALIZER
8443 ? EXPAND_INITIALIZER : EXPAND_SUM));
8444 if (! CONSTANT_P (op0))
8446 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8447 VOIDmode, modifier);
8448 /* Return a PLUS if modifier says it's OK. */
8449 if (modifier == EXPAND_SUM
8450 || modifier == EXPAND_INITIALIZER)
8451 return simplify_gen_binary (PLUS, mode, op0, op1);
8454 /* Use immed_double_const to ensure that the constant is
8455 truncated according to the mode of OP1, then sign extended
8456 to a HOST_WIDE_INT. Using the constant directly can result
8457 in non-canonical RTL in a 64x32 cross compile. */
8459 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8461 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8462 op0 = plus_constant (op0, INTVAL (constant_part));
8463 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8464 op0 = force_operand (op0, target);
8465 return REDUCE_BIT_FIELD (op0);
8469 /* No sense saving up arithmetic to be done
8470 if it's all in the wrong mode to form part of an address.
8471 And force_operand won't know whether to sign-extend or
8472 zero-extend. */
8473 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8474 || mode != ptr_mode)
8476 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8477 subtarget, &op0, &op1, 0);
8478 if (op0 == const0_rtx)
8479 return op1;
8480 if (op1 == const0_rtx)
8481 return op0;
8482 goto binop2;
8485 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8486 subtarget, &op0, &op1, modifier);
8487 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8489 case MINUS_EXPR:
8490 /* Check if this is a case for multiplication and subtraction. */
8491 if ((TREE_CODE (type) == INTEGER_TYPE
8492 || TREE_CODE (type) == FIXED_POINT_TYPE)
8493 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8495 tree subsubexp0, subsubexp1;
8496 enum tree_code code0, code1, this_code;
8498 subexp1 = TREE_OPERAND (exp, 1);
8499 subsubexp0 = TREE_OPERAND (subexp1, 0);
8500 subsubexp1 = TREE_OPERAND (subexp1, 1);
8501 code0 = TREE_CODE (subsubexp0);
8502 code1 = TREE_CODE (subsubexp1);
8503 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8504 : FIXED_CONVERT_EXPR;
8505 if (code0 == this_code && code1 == this_code
8506 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8507 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8508 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8509 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8510 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8511 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8513 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8514 enum machine_mode innermode = TYPE_MODE (op0type);
8515 bool zextend_p = TYPE_UNSIGNED (op0type);
8516 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
8517 if (sat_p == 0)
8518 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8519 else
8520 this_optab = zextend_p ? usmsub_widen_optab
8521 : ssmsub_widen_optab;
8522 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8523 && (optab_handler (this_optab, mode)->insn_code
8524 != CODE_FOR_nothing))
8526 expand_operands (TREE_OPERAND (subsubexp0, 0),
8527 TREE_OPERAND (subsubexp1, 0),
8528 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8529 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8530 VOIDmode, EXPAND_NORMAL);
8531 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8532 target, unsignedp);
8533 gcc_assert (temp);
8534 return REDUCE_BIT_FIELD (temp);
8539 /* For initializers, we are allowed to return a MINUS of two
8540 symbolic constants. Here we handle all cases when both operands
8541 are constant. */
8542 /* Handle difference of two symbolic constants,
8543 for the sake of an initializer. */
8544 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8545 && really_constant_p (TREE_OPERAND (exp, 0))
8546 && really_constant_p (TREE_OPERAND (exp, 1)))
8548 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8549 NULL_RTX, &op0, &op1, modifier);
8551 /* If the last operand is a CONST_INT, use plus_constant of
8552 the negated constant. Else make the MINUS. */
8553 if (GET_CODE (op1) == CONST_INT)
8554 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8556 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8559 /* No sense saving up arithmetic to be done
8560 if it's all in the wrong mode to form part of an address.
8561 And force_operand won't know whether to sign-extend or
8562 zero-extend. */
8563 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8564 || mode != ptr_mode
8565 goto binop;
8567 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8568 subtarget, &op0, &op1, modifier);
8570 /* Convert A - const to A + (-const). */
8571 if (GET_CODE (op1) == CONST_INT)
8573 op1 = negate_rtx (mode, op1);
8574 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8576 goto binop2;
8578 case MULT_EXPR:
8580 /* If this is a fixed-point operation, then we cannot use the code
8581 below because "expand_mult" doesn't support sat/no-sat fixed-point
8582 multiplications. */
8583 if (ALL_FIXED_POINT_MODE_P (mode))
8584 goto binop;
8586 /* If first operand is constant, swap them.
8587 Thus the following special case checks need only
8588 check the second operand. */
8589 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8591 tree t1 = TREE_OPERAND (exp, 0);
8592 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8593 TREE_OPERAND (exp, 1) = t1;
8596 /* Attempt to return something suitable for generating an
8597 indexed address, for machines that support that. */
8599 if (modifier == EXPAND_SUM && mode == ptr_mode
8600 && host_integerp (TREE_OPERAND (exp, 1), 0))
8602 tree exp1 = TREE_OPERAND (exp, 1);
8604 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8608 op0 = force_operand (op0, NULL_RTX);
8610 op0 = copy_to_mode_reg (mode, op0);
8612 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8613 gen_int_mode (tree_low_cst (exp1, 0),
8614 TYPE_MODE (TREE_TYPE (exp1)))));
8617 if (modifier == EXPAND_STACK_PARM)
8618 target = 0;
8620 /* Check for multiplying things that have been extended
8621 from a narrower type. If this machine supports multiplying
8622 in that narrower type with a result in the desired type,
8623 do it that way, and avoid the explicit type-conversion. */
8625 subexp0 = TREE_OPERAND (exp, 0);
8626 subexp1 = TREE_OPERAND (exp, 1);
8627 /* First, check if we have a multiplication of one signed and one
8628 unsigned operand. */
8629 if (TREE_CODE (subexp0) == NOP_EXPR
8630 && TREE_CODE (subexp1) == NOP_EXPR
8631 && TREE_CODE (type) == INTEGER_TYPE
8632 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8633 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8634 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8635 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8636 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8637 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8639 enum machine_mode innermode
8640 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8641 this_optab = usmul_widen_optab;
8642 if (mode == GET_MODE_WIDER_MODE (innermode))
8644 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8646 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8647 expand_operands (TREE_OPERAND (subexp0, 0),
8648 TREE_OPERAND (subexp1, 0),
8649 NULL_RTX, &op0, &op1, 0);
8651 expand_operands (TREE_OPERAND (subexp0, 0),
8652 TREE_OPERAND (subexp1, 0),
8653 NULL_RTX, &op1, &op0, 0);
8659 /* Check for a multiplication with matching signedness. */
8660 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8661 && TREE_CODE (type) == INTEGER_TYPE
8662 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8663 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8664 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8665 && int_fits_type_p (TREE_OPERAND (exp, 1),
8666 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8667 /* Don't use a widening multiply if a shift will do. */
8668 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8669 > HOST_BITS_PER_WIDE_INT)
8670 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8672 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8673 && (TYPE_PRECISION (TREE_TYPE
8674 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8675 == TYPE_PRECISION (TREE_TYPE
8677 (TREE_OPERAND (exp, 0), 0))))
8678 /* If both operands are extended, they must either both
8679 be zero-extended or both be sign-extended. */
8680 && (TYPE_UNSIGNED (TREE_TYPE
8681 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8682 == TYPE_UNSIGNED (TREE_TYPE
8684 (TREE_OPERAND (exp, 0), 0)))))))
8686 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8687 enum machine_mode innermode = TYPE_MODE (op0type);
8688 bool zextend_p = TYPE_UNSIGNED (op0type);
8689 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8690 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8692 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8694 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8696 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8697 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8698 TREE_OPERAND (exp, 1),
8699 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8701 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8702 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8703 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8706 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8707 && innermode == word_mode)
8710 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8711 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8712 op1 = convert_modes (innermode, mode,
8713 expand_normal (TREE_OPERAND (exp, 1)),
8716 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8717 temp = expand_binop (mode, other_optab, op0, op1, target,
8718 unsignedp, OPTAB_LIB_WIDEN);
8719 hipart = gen_highpart (innermode, temp);
8720 htem = expand_mult_highpart_adjust (innermode, hipart,
8724 emit_move_insn (hipart, htem);
8725 return REDUCE_BIT_FIELD (temp);
8729 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8730 subtarget, &op0, &op1, 0);
8731 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8733 case TRUNC_DIV_EXPR:
8734 case FLOOR_DIV_EXPR:
8735 case CEIL_DIV_EXPR:
8736 case ROUND_DIV_EXPR:
8737 case EXACT_DIV_EXPR:
8738 /* If this is a fixed-point operation, then we cannot use the code
8739 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8740 divisions. */
8741 if (ALL_FIXED_POINT_MODE_P (mode))
8742 goto binop;
8744 if (modifier == EXPAND_STACK_PARM)
8745 target = 0;
8746 /* Possible optimization: compute the dividend with EXPAND_SUM
8747 then if the divisor is constant can optimize the case
8748 where some terms of the dividend have coeffs divisible by it. */
8749 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8750 subtarget, &op0, &op1, 0);
8751 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8753 case RDIV_EXPR:
8754 goto binop;
8756 case TRUNC_MOD_EXPR:
8757 case FLOOR_MOD_EXPR:
8758 case CEIL_MOD_EXPR:
8759 case ROUND_MOD_EXPR:
8760 if (modifier == EXPAND_STACK_PARM)
8761 target = 0;
8762 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8763 subtarget, &op0, &op1, 0);
8764 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8766 case FIXED_CONVERT_EXPR:
8767 op0 = expand_normal (TREE_OPERAND (exp, 0));
8768 if (target == 0 || modifier == EXPAND_STACK_PARM)
8769 target = gen_reg_rtx (mode);
8771 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8772 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8773 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8774 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8775 else
8776 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8777 return target;
8779 case FIX_TRUNC_EXPR:
8780 op0 = expand_normal (TREE_OPERAND (exp, 0));
8781 if (target == 0 || modifier == EXPAND_STACK_PARM)
8782 target = gen_reg_rtx (mode);
8783 expand_fix (target, op0, unsignedp);
8784 return target;
8786 case FLOAT_EXPR:
8787 op0 = expand_normal (TREE_OPERAND (exp, 0));
8788 if (target == 0 || modifier == EXPAND_STACK_PARM)
8789 target = gen_reg_rtx (mode);
8790 /* expand_float can't figure out what to do if FROM has VOIDmode.
8791 So give it the correct mode. With -O, cse will optimize this. */
8792 if (GET_MODE (op0) == VOIDmode)
8793 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8795 expand_float (target, op0,
8796 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8797 return target;
8799 case NEGATE_EXPR:
8800 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8801 VOIDmode, EXPAND_NORMAL);
8802 if (modifier == EXPAND_STACK_PARM)
8803 target = 0;
8804 temp = expand_unop (mode,
8805 optab_for_tree_code (NEGATE_EXPR, type,
8806 optab_default),
8807 op0, target, 0);
8808 gcc_assert (temp);
8809 return REDUCE_BIT_FIELD (temp);
8811 case ABS_EXPR:
8812 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8813 VOIDmode, EXPAND_NORMAL);
8814 if (modifier == EXPAND_STACK_PARM)
8815 target = 0;
8817 /* ABS_EXPR is not valid for complex arguments. */
8818 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8819 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8821 /* Unsigned abs is simply the operand. Testing here means we don't
8822 risk generating incorrect code below. */
8823 if (TYPE_UNSIGNED (type))
8824 return op0;
8826 return expand_abs (mode, op0, target, unsignedp,
8827 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8829 case MAX_EXPR:
8830 case MIN_EXPR:
8831 target = original_target;
8832 if (target == 0
8833 || modifier == EXPAND_STACK_PARM
8834 || (MEM_P (target) && MEM_VOLATILE_P (target))
8835 || GET_MODE (target) != mode
8836 || (REG_P (target)
8837 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8838 target = gen_reg_rtx (mode);
8839 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8840 target, &op0, &op1, 0);
8842 /* First try to do it with a special MIN or MAX instruction.
8843 If that does not win, use a conditional jump to select the proper
8845 this_optab = optab_for_tree_code (code, type, optab_default);
8846 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8847 OPTAB_WIDEN);
8848 if (temp != 0)
8849 return temp;
8851 /* At this point, a MEM target is no longer useful; we will get better
8854 if (! REG_P (target))
8855 target = gen_reg_rtx (mode);
8857 /* If op1 was placed in target, swap op0 and op1. */
8858 if (target != op0 && target == op1)
8865 /* We generate better code and avoid problems with op1 mentioning
8866 target by forcing op1 into a pseudo if it isn't a constant. */
8867 if (! CONSTANT_P (op1))
8868 op1 = force_reg (mode, op1);
8871 enum rtx_code comparison_code;
8874 if (code == MAX_EXPR)
8875 comparison_code = unsignedp ? GEU : GE;
8876 else
8877 comparison_code = unsignedp ? LEU : LE;
8879 /* Canonicalize to comparisons against 0. */
8880 if (op1 == const1_rtx)
8882 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8883 or (a != 0 ? a : 1) for unsigned.
8884 For MIN we are safe converting (a <= 1 ? a : 1)
8885 into (a <= 0 ? a : 1) */
8886 cmpop1 = const0_rtx;
8887 if (code == MAX_EXPR)
8888 comparison_code = unsignedp ? NE : GT;
8890 if (op1 == constm1_rtx && !unsignedp)
8892 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8893 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8894 cmpop1 = const0_rtx;
8895 if (code == MIN_EXPR)
8896 comparison_code = LT;
8898 #ifdef HAVE_conditional_move
8899 /* Use a conditional move if possible. */
8900 if (can_conditionally_move_p (mode))
8902 rtx insn;
8904 /* ??? Same problem as in expmed.c: emit_conditional_move
8905 forces a stack adjustment via compare_from_rtx, and we
8906 lose the stack adjustment if the sequence we are about
8907 to create is discarded. */
8908 do_pending_stack_adjust ();
8910 start_sequence ();
8912 /* Try to emit the conditional move. */
8913 insn = emit_conditional_move (target, comparison_code,
8914 op0, cmpop1, mode,
8915 op0, op1, mode,
8916 unsignedp);
8918 /* If we could do the conditional move, emit the sequence,
8922 rtx seq = get_insns ();
8923 end_sequence ();
8924 emit_insn (seq);
8925 return target;
8928 /* Otherwise discard the sequence and fall back to code with
8929 branches. */
8930 end_sequence ();
8932 #endif
8933 if (target != op0)
8934 emit_move_insn (target, op0);
8936 temp = gen_label_rtx ();
8937 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8938 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8940 emit_move_insn (target, op1);
8941 emit_label (temp);
8942 return target;
8944 case BIT_NOT_EXPR:
8945 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8946 VOIDmode, EXPAND_NORMAL);
8947 if (modifier == EXPAND_STACK_PARM)
8949 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8950 gcc_assert (temp);
8951 return temp;
8953 /* ??? Can optimize bitwise operations with one arg constant.
8954 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8955 and (a bitwise1 b) bitwise2 b (etc)
8956 but that is probably not worthwhile. */
8958 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8959 boolean values when we want in all cases to compute both of them. In
8960 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8961 as actual zero-or-1 values and then bitwise anding. In cases where
8962 there cannot be any side effects, better code would be made by
8963 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8964 how to recognize those cases. */
8966 case TRUTH_AND_EXPR:
8967 code = BIT_AND_EXPR;
8968 case BIT_AND_EXPR:
8969 goto binop;
8971 case TRUTH_OR_EXPR:
8972 code = BIT_IOR_EXPR;
8973 case BIT_IOR_EXPR:
8974 goto binop;
8976 case TRUTH_XOR_EXPR:
8977 code = BIT_XOR_EXPR;
8978 case BIT_XOR_EXPR:
8979 goto binop;
8981 case LROTATE_EXPR:
8982 case RROTATE_EXPR:
8983 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8984 || (GET_MODE_PRECISION (TYPE_MODE (type))
8985 == TYPE_PRECISION (type)));
8986 /* fall through */
8988 case LSHIFT_EXPR:
8989 case RSHIFT_EXPR:
8990 /* If this is a fixed-point operation, then we cannot use the code
8991 below because "expand_shift" doesn't support sat/no-sat fixed-point
8992 shifts. */
8993 if (ALL_FIXED_POINT_MODE_P (mode))
8994 goto binop;
8996 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8997 subtarget = 0;
8998 if (modifier == EXPAND_STACK_PARM)
8999 target = 0;
9000 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
9001 VOIDmode, EXPAND_NORMAL);
9002 temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
9004 if (code == LSHIFT_EXPR)
9005 temp = REDUCE_BIT_FIELD (temp);
9008 /* Could determine the answer when only additive constants differ. Also,
9009 the addition of one can be handled by changing the condition. */
9010 case LT_EXPR:
9011 case LE_EXPR:
9012 case GT_EXPR:
9013 case GE_EXPR:
9014 case EQ_EXPR:
9015 case NE_EXPR:
9016 case UNORDERED_EXPR:
9017 case ORDERED_EXPR:
9018 case UNLT_EXPR:
9019 case UNLE_EXPR:
9020 case UNGT_EXPR:
9021 case UNGE_EXPR:
9022 case UNEQ_EXPR:
9023 case LTGT_EXPR:
9024 temp = do_store_flag (exp,
9025 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9026 tmode != VOIDmode ? tmode : mode);
9027 if (temp != 0)
9028 return temp;
9030 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
9031 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
9032 && original_target
9033 && REG_P (original_target)
9034 && (GET_MODE (original_target)
9035 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9037 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
9038 VOIDmode, EXPAND_NORMAL);
9040 /* If temp is constant, we can just compute the result. */
9041 if (GET_CODE (temp) == CONST_INT)
9043 if (INTVAL (temp) != 0)
9044 emit_move_insn (target, const1_rtx);
9045 else
9046 emit_move_insn (target, const0_rtx);
9048 return target;
9051 if (temp != original_target)
9053 enum machine_mode mode1 = GET_MODE (temp);
9054 if (mode1 == VOIDmode)
9055 mode1 = tmode != VOIDmode ? tmode : mode;
9057 temp = copy_to_mode_reg (mode1, temp);
9060 op1 = gen_label_rtx ();
9061 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
9062 GET_MODE (temp), unsignedp, op1);
9063 emit_move_insn (temp, const1_rtx);
9064 emit_label (op1);
9065 return temp;
9068 /* If no set-flag instruction, must generate a conditional store
9069 into a temporary variable. Drop through and handle this
9071 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9072 are occasionally created by folding during expansion. */
9073 case TRUTH_ANDIF_EXPR:
9074 case TRUTH_ORIF_EXPR:
9077 || modifier == EXPAND_STACK_PARM
9078 || ! safe_from_p (target, exp, 1)
9079 /* Make sure we don't have a hard reg (such as function's return
9080 value) live across basic blocks, if not optimizing. */
9081 || (!optimize && REG_P (target)
9082 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9083 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9086 emit_move_insn (target, const0_rtx);
9088 op1 = gen_label_rtx ();
9089 jumpifnot (exp, op1);
9091 if (target)
9092 emit_move_insn (target, const1_rtx);
9094 emit_label (op1);
9095 return ignore ? const0_rtx : target;
9097 case TRUTH_NOT_EXPR:
9098 if (modifier == EXPAND_STACK_PARM)
9099 target = 0;
9100 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
9101 VOIDmode, EXPAND_NORMAL);
9102 /* The parser is careful to generate TRUTH_NOT_EXPR
9103 only with operands that are always zero or one. */
9104 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
9105 target, 1, OPTAB_LIB_WIDEN);
9106 gcc_assert (temp);
9107 return temp;
9109 case STATEMENT_LIST:
9111 tree_stmt_iterator iter;
9113 gcc_assert (ignore);
9115 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9116 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9121 /* A COND_EXPR with its type being VOID_TYPE represents a
9122 conditional jump and is handled in
9123 expand_gimple_cond_expr. */
9124 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9126 /* Note that COND_EXPRs whose type is a structure or union
9127 are required to be constructed to contain assignments of
9128 a temporary variable, so that we can evaluate them here
9129 for side effect only. If type is void, we must do likewise. */
9131 gcc_assert (!TREE_ADDRESSABLE (type)
9132 && !ignore
9133 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9134 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9136 /* If we are not to produce a result, we have no target. Otherwise,
9137 if a target was specified use it; it will not be used as an
9138 intermediate target unless it is safe. If no target, use a
9141 if (modifier != EXPAND_STACK_PARM
9143 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9144 && GET_MODE (original_target) == mode
9145 #ifdef HAVE_conditional_move
9146 && (! can_conditionally_move_p (mode)
9147 || REG_P (original_target))
9148 #endif
9149 && !MEM_P (original_target))
9150 temp = original_target;
9152 temp = assign_temp (type, 0, 0, 1);
9154 do_pending_stack_adjust ();
9156 op0 = gen_label_rtx ();
9157 op1 = gen_label_rtx ();
9158 jumpifnot (TREE_OPERAND (exp, 0), op0);
9159 store_expr (TREE_OPERAND (exp, 1), temp,
9160 modifier == EXPAND_STACK_PARM,
9161 false);
9163 emit_jump_insn (gen_jump (op1));
9164 emit_barrier ();
9165 emit_label (op0);
9166 store_expr (TREE_OPERAND (exp, 2), temp,
9167 modifier == EXPAND_STACK_PARM,
9168 false);
9170 emit_label (op1);
9171 return temp;
9174 case VEC_COND_EXPR:
9175 target = expand_vec_cond_expr (exp, target);
9176 return target;
9178 case MODIFY_EXPR:
9180 tree lhs = TREE_OPERAND (exp, 0);
9181 tree rhs = TREE_OPERAND (exp, 1);
9182 gcc_assert (ignore);
9184 /* Check for |= or &= of a bitfield of size one into another bitfield
9185 of size 1. In this case, (unless we need the result of the
9186 assignment) we can do this more efficiently with a
9187 test followed by an assignment, if necessary.
9189 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9190 things change so we do, this code should be enhanced to
9192 if (TREE_CODE (lhs) == COMPONENT_REF
9193 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9194 || TREE_CODE (rhs) == BIT_AND_EXPR)
9195 && TREE_OPERAND (rhs, 0) == lhs
9196 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9197 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9198 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9200 rtx label = gen_label_rtx ();
9201 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9202 do_jump (TREE_OPERAND (rhs, 1),
9203 value ? label : 0,
9204 value ? 0 : label);
9205 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9206 MOVE_NONTEMPORAL (exp));
9207 do_pending_stack_adjust ();
9208 emit_label (label);
9209 return const0_rtx;
9212 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9213 return const0_rtx;
9216 case RETURN_EXPR:
9217 if (!TREE_OPERAND (exp, 0))
9218 expand_null_return ();
9219 else
9220 expand_return (TREE_OPERAND (exp, 0));
9221 return const0_rtx;
9223 case ADDR_EXPR:
9224 return expand_expr_addr_expr (exp, target, tmode, modifier);
9226 case COMPLEX_EXPR:
9227 /* Get the rtx code of the operands. */
9228 op0 = expand_normal (TREE_OPERAND (exp, 0));
9229 op1 = expand_normal (TREE_OPERAND (exp, 1));
9231 if (!target)
9232 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9234 /* Move the real (op0) and imaginary (op1) parts to their location. */
9235 write_complex_part (target, op0, false);
9236 write_complex_part (target, op1, true);
9238 return target;
9240 case REALPART_EXPR:
9241 op0 = expand_normal (TREE_OPERAND (exp, 0));
9242 return read_complex_part (op0, false);
9244 case IMAGPART_EXPR:
9245 op0 = expand_normal (TREE_OPERAND (exp, 0));
9246 return read_complex_part (op0, true);
9248 case RESX_EXPR:
9249 expand_resx_expr (exp);
9250 return const0_rtx;
9252 case TRY_CATCH_EXPR:
9253 case CATCH_EXPR:
9254 case EH_FILTER_EXPR:
9255 case TRY_FINALLY_EXPR:
9256 /* Lowered by tree-eh.c. */
9257 gcc_unreachable ();
9259 case WITH_CLEANUP_EXPR:
9260 case CLEANUP_POINT_EXPR:
9262 case CASE_LABEL_EXPR:
9268 case PREINCREMENT_EXPR:
9269 case PREDECREMENT_EXPR:
9270 case POSTINCREMENT_EXPR:
9271 case POSTDECREMENT_EXPR:
9274 /* Lowered by gimplify.c. */
9275 gcc_unreachable ();
9277 case CHANGE_DYNAMIC_TYPE_EXPR:
9278 /* This is ignored at the RTL level. The tree level set
9279 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
9280 overkill for the RTL layer but is all that we can
9281 represent. */
9282 return const0_rtx;
9284 case EXC_PTR_EXPR:
9285 return get_exception_pointer ();
9287 case FILTER_EXPR:
9288 return get_exception_filter ();
9291 /* Function descriptors are not valid except for as
9292 initialization constants, and should not be expanded. */
9300 expand_label (TREE_OPERAND (exp, 0));
9301 return const0_rtx;
9303 case ASM_EXPR:
9304 expand_asm_expr (exp);
9305 return const0_rtx;
9307 case WITH_SIZE_EXPR:
9308 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9309 have pulled out the size to use in whatever context it needed. */
9310 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9311 modifier, alt_rtl);
9313 case REALIGN_LOAD_EXPR:
9315 tree oprnd0 = TREE_OPERAND (exp, 0);
9316 tree oprnd1 = TREE_OPERAND (exp, 1);
9317 tree oprnd2 = TREE_OPERAND (exp, 2);
9320 this_optab = optab_for_tree_code (code, type, optab_default);
9321 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9322 op2 = expand_normal (oprnd2);
9323 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9324 target, unsignedp);
9325 gcc_assert (temp);
9326 return temp;
9329 case DOT_PROD_EXPR:
9331 tree oprnd0 = TREE_OPERAND (exp, 0);
9332 tree oprnd1 = TREE_OPERAND (exp, 1);
9333 tree oprnd2 = TREE_OPERAND (exp, 2);
9336 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9337 op2 = expand_normal (oprnd2);
9338 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9343 case WIDEN_SUM_EXPR:
9345 tree oprnd0 = TREE_OPERAND (exp, 0);
9346 tree oprnd1 = TREE_OPERAND (exp, 1);
9348 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9349 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9354 case REDUC_MAX_EXPR:
9355 case REDUC_MIN_EXPR:
9356 case REDUC_PLUS_EXPR:
9358 op0 = expand_normal (TREE_OPERAND (exp, 0));
9359 this_optab = optab_for_tree_code (code, type, optab_default);
9360 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9365 case VEC_EXTRACT_EVEN_EXPR:
9366 case VEC_EXTRACT_ODD_EXPR:
9368 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9369 NULL_RTX, &op0, &op1, 0);
9370 this_optab = optab_for_tree_code (code, type, optab_default);
9371 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9377 case VEC_INTERLEAVE_HIGH_EXPR:
9378 case VEC_INTERLEAVE_LOW_EXPR:
9380 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9381 NULL_RTX, &op0, &op1, 0);
9382 this_optab = optab_for_tree_code (code, type, optab_default);
9383 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9389 case VEC_LSHIFT_EXPR:
9390 case VEC_RSHIFT_EXPR:
9392 target = expand_vec_shift_expr (exp, target);
9393 return target;
9396 case VEC_UNPACK_HI_EXPR:
9397 case VEC_UNPACK_LO_EXPR:
9399 op0 = expand_normal (TREE_OPERAND (exp, 0));
9400 this_optab = optab_for_tree_code (code, type, optab_default);
9401 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9407 case VEC_UNPACK_FLOAT_HI_EXPR:
9408 case VEC_UNPACK_FLOAT_LO_EXPR:
9410 op0 = expand_normal (TREE_OPERAND (exp, 0));
9411 /* The signedness is determined from input operand. */
9412 this_optab = optab_for_tree_code (code,
9413 TREE_TYPE (TREE_OPERAND (exp, 0)),
9415 temp = expand_widen_pattern_expr
9416 (exp, op0, NULL_RTX, NULL_RTX,
9417 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9423 case VEC_WIDEN_MULT_HI_EXPR:
9424 case VEC_WIDEN_MULT_LO_EXPR:
9426 tree oprnd0 = TREE_OPERAND (exp, 0);
9427 tree oprnd1 = TREE_OPERAND (exp, 1);
9429 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9430 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9432 gcc_assert (target);
9436 case VEC_PACK_TRUNC_EXPR:
9437 case VEC_PACK_SAT_EXPR:
9438 case VEC_PACK_FIX_TRUNC_EXPR:
9439 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9440 goto binop;
9442 case COMPOUND_LITERAL_EXPR:
9444 /* Initialize the anonymous variable declared in the compound
9445 literal, then return the variable. */
9446 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9448 /* Create RTL for this variable. */
9449 if (!DECL_RTL_SET_P (decl))
9451 if (DECL_HARD_REGISTER (decl))
9452 /* The user specified an assembler name for this variable.
9453 Handle that. */
9454 rest_of_decl_compilation (decl, 0, 0);
9455 else
9456 expand_decl (decl);
9459 return expand_expr_real (decl, original_target, tmode,
9460 modifier, alt_rtl);
9467 /* Here to do an ordinary binary operator. */
9468 binop:
9469 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9470 subtarget, &op0, &op1, 0);
9471 binop2:
9472 this_optab = optab_for_tree_code (code, type, optab_default);
9474 if (modifier == EXPAND_STACK_PARM)
9475 target = 0;
9476 temp = expand_binop (mode, this_optab, op0, op1, target,
9477 unsignedp, OPTAB_LIB_WIDEN);
9478 gcc_assert (temp);
9479 return REDUCE_BIT_FIELD (temp);
9481 #undef REDUCE_BIT_FIELD
9483 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9484 signedness of TYPE), possibly returning the result in TARGET. */
9485 static rtx
9486 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9488 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9489 if (target && GET_MODE (target) != GET_MODE (exp))
9490 target = 0;
9491 /* For constant values, reduce using build_int_cst_type. */
9492 if (GET_CODE (exp) == CONST_INT)
9494 HOST_WIDE_INT value = INTVAL (exp);
9495 tree t = build_int_cst_type (type, value);
9496 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9498 else if (TYPE_UNSIGNED (type))
9501 if (prec < HOST_BITS_PER_WIDE_INT)
9502 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9503 GET_MODE (exp));
9504 else
9505 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9506 ((unsigned HOST_WIDE_INT) 1
9507 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9508 GET_MODE (exp));
9509 return expand_and (GET_MODE (exp), exp, mask, target);
9513 tree count = build_int_cst (NULL_TREE,
9514 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9515 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9516 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9520 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9521 when applied to the address of EXP produces an address known to be
9522 aligned more than BIGGEST_ALIGNMENT. */
9524 static int
9525 is_aligning_offset (const_tree offset, const_tree exp)
9527 /* Strip off any conversions. */
9528 while (CONVERT_EXPR_P (offset))
9529 offset = TREE_OPERAND (offset, 0);
9531 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9532 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9533 if (TREE_CODE (offset) != BIT_AND_EXPR
9534 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9535 || compare_tree_int (TREE_OPERAND (offset, 1),
9536 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9537 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9538 return 0;
9540 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9541 It must be NEGATE_EXPR. Then strip any more conversions. */
9542 offset = TREE_OPERAND (offset, 0);
9543 while (CONVERT_EXPR_P (offset))
9544 offset = TREE_OPERAND (offset, 0);
9546 if (TREE_CODE (offset) != NEGATE_EXPR)
9547 return 0;
9549 offset = TREE_OPERAND (offset, 0);
9550 while (CONVERT_EXPR_P (offset))
9551 offset = TREE_OPERAND (offset, 0);
9553 /* This must now be the address of EXP. */
9554 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9557 /* Return the tree node if an ARG corresponds to a string constant or zero
9558 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9559 in bytes within the string that ARG is accessing. The type of the
9560 offset will be `sizetype'. */
9562 tree
9563 string_constant (tree arg, tree *ptr_offset)
9565 tree array, offset, lower_bound;
9568 if (TREE_CODE (arg) == ADDR_EXPR)
9570 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9572 *ptr_offset = size_zero_node;
9573 return TREE_OPERAND (arg, 0);
9575 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9577 array = TREE_OPERAND (arg, 0);
9578 offset = size_zero_node;
9580 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9582 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9583 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9584 if (TREE_CODE (array) != STRING_CST
9585 && TREE_CODE (array) != VAR_DECL)
9588 /* Check if the array has a nonzero lower bound. */
9589 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9590 if (!integer_zerop (lower_bound))
9592 /* If the offset and base aren't both constants, return 0. */
9593 if (TREE_CODE (lower_bound) != INTEGER_CST)
9594 return 0;
9595 if (TREE_CODE (offset) != INTEGER_CST)
9596 return 0;
9597 /* Adjust offset by the lower bound. */
9598 offset = size_diffop (fold_convert (sizetype, offset),
9599 fold_convert (sizetype, lower_bound));
9605 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9607 tree arg0 = TREE_OPERAND (arg, 0);
9608 tree arg1 = TREE_OPERAND (arg, 1);
9613 if (TREE_CODE (arg0) == ADDR_EXPR
9614 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9615 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9617 array = TREE_OPERAND (arg0, 0);
9620 else if (TREE_CODE (arg1) == ADDR_EXPR
9621 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9622 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9624 array = TREE_OPERAND (arg1, 0);
9633 if (TREE_CODE (array) == STRING_CST)
9635 *ptr_offset = fold_convert (sizetype, offset);
9638 else if (TREE_CODE (array) == VAR_DECL)
9642 /* Variables initialized to string literals can be handled too. */
9643 if (DECL_INITIAL (array) == NULL_TREE
9644 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9645 return 0;
9647 /* Only accept arrays that are read-only, non-volatile and bind locally. */
9648 if (! TREE_READONLY (array)
9649 || TREE_SIDE_EFFECTS (array)
9650 || ! targetm.binds_local_p (array))
9651 return 0;
9653 /* Avoid const char foo[4] = "abcde"; */
9654 if (DECL_SIZE_UNIT (array) == NULL_TREE
9655 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9656 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9657 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9658 return 0;
9660 /* If variable is bigger than the string literal, OFFSET must be constant
9661 and inside of the bounds of the string literal. */
9662 offset = fold_convert (sizetype, offset);
9663 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9664 && (! host_integerp (offset, 1)
9665 || compare_tree_int (offset, length) >= 0))
9666 return 0;
9668 *ptr_offset = offset;
9669 return DECL_INITIAL (array);
9675 /* Generate code to calculate EXP using a store-flag instruction
9676 and return an rtx for the result. EXP is either a comparison
9677 or a TRUTH_NOT_EXPR whose operand is a comparison.
9679 If TARGET is nonzero, store the result there if convenient.
9681 Return zero if there is no suitable set-flag instruction
9682 available on this machine.
9684 Once expand_expr has been called on the arguments of the comparison,
9685 we are committed to doing the store flag, since it is not safe to
9686 re-evaluate the expression. We emit the store-flag insn by calling
9687 emit_store_flag, but only expand the arguments if we have a reason
9688 to believe that emit_store_flag will be successful. If we think that
9689 it will, but it isn't, we have to simulate the store-flag with a
9690 set/jump/set sequence. */
9692 static rtx
9693 do_store_flag (tree exp, rtx target, enum machine_mode mode)
9696 tree arg0, arg1, type;
9698 enum machine_mode operand_mode;
9702 rtx subtarget = target;
9705 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9706 result at the end. We can't simply invert the test since it would
9707 have already been inverted if it were valid. This case occurs for
9708 some floating-point comparisons. */
9710 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9711 invert = 1, exp = TREE_OPERAND (exp, 0);
9713 arg0 = TREE_OPERAND (exp, 0);
9714 arg1 = TREE_OPERAND (exp, 1);
9716 /* Don't crash if the comparison was erroneous. */
9717 if (arg0 == error_mark_node || arg1 == error_mark_node)
9718 return const0_rtx;
9720 type = TREE_TYPE (arg0);
9721 operand_mode = TYPE_MODE (type);
9722 unsignedp = TYPE_UNSIGNED (type);
9724 /* We won't bother with BLKmode store-flag operations because it would mean
9725 passing a lot of information to emit_store_flag. */
9726 if (operand_mode == BLKmode)
9727 return 0;
9729 /* We won't bother with store-flag operations involving function pointers
9730 when function pointers must be canonicalized before comparisons. */
9731 #ifdef HAVE_canonicalize_funcptr_for_compare
9732 if (HAVE_canonicalize_funcptr_for_compare
9733 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9734 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9736 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9737 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9738 == FUNCTION_TYPE))))
9739 return 0;
9740 #endif
9745 /* Get the rtx comparison code to use. We know that EXP is a comparison
9746 operation of some type. Some comparisons against 1 and -1 can be
9747 converted to comparisons with zero. Do so here so that the tests
9748 below will be aware that we have a comparison with zero. These
9749 tests will not catch constants in the first operand, but constants
9750 are rarely passed as the first operand. */
9752 switch (TREE_CODE (exp))
9760 case LT_EXPR:
9761 if (integer_onep (arg1))
9762 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9763 else
9764 code = unsignedp ? LTU : LT;
9765 break;
9766 case LE_EXPR:
9767 if (! unsignedp && integer_all_onesp (arg1))
9768 arg1 = integer_zero_node, code = LT;
9769 else
9770 code = unsignedp ? LEU : LE;
9771 break;
9772 case GT_EXPR:
9773 if (! unsignedp && integer_all_onesp (arg1))
9774 arg1 = integer_zero_node, code = GE;
9775 else
9776 code = unsignedp ? GTU : GT;
9777 break;
9778 case GE_EXPR:
9779 if (integer_onep (arg1))
9780 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9781 else
9782 code = unsignedp ? GEU : GE;
9783 break;
9785 case UNORDERED_EXPR:
9814 /* Put a constant second. */
9815 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9816 || TREE_CODE (arg0) == FIXED_CST)
9818 tem = arg0; arg0 = arg1; arg1 = tem;
9819 code = swap_condition (code);
9822 /* If this is an equality or inequality test of a single bit, we can
9823 do this by shifting the bit being tested to the low-order bit and
9824 masking the result with the constant 1. If the condition was EQ,
9825 we xor it with 1. This does not require an scc insn and is faster
9826 than an scc insn even if we have it.
9828 The code to make this transformation was moved into fold_single_bit_test,
9829 so we just call into the folder and expand its result. */
9831 if ((code == NE || code == EQ)
9832 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9833 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9835 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9836 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9837 arg0, arg1, type),
9838 target, VOIDmode, EXPAND_NORMAL);
9841 /* Now see if we are likely to be able to do this. Return if not. */
9842 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9843 return 0;
9845 if (! get_subtarget (target)
9846 || GET_MODE (subtarget) != operand_mode)
9847 subtarget = 0;
9849 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9851 if (! target)
9852 target = gen_reg_rtx (mode);
9854 result = emit_store_flag (target, code, op0, op1,
9855 operand_mode, unsignedp, 1);
9860 result = expand_binop (mode, xor_optab, result, const1_rtx,
9861 result, 0, OPTAB_LIB_WIDEN);
9865 /* If this failed, we have to do this with set/compare/jump/set code. */
9867 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9868 target = gen_reg_rtx (GET_MODE (target));
9870 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9871 label = gen_label_rtx ();
9872 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9875 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9876 emit_label (label);
9878 return target;
9882 /* Stubs in case we haven't got a casesi insn. */
9883 #ifndef HAVE_casesi
9884 # define HAVE_casesi 0
9885 # define gen_casesi(a, b, c, d, e) (0)
9886 # define CODE_FOR_casesi CODE_FOR_nothing
9887 #endif
9889 /* If the machine does not have a case insn that compares the bounds,
9890 this means extra overhead for dispatch tables, which raises the
9891 threshold for using them. */
9892 #ifndef CASE_VALUES_THRESHOLD
9893 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9894 #endif /* CASE_VALUES_THRESHOLD */
9897 case_values_threshold (void)
9899 return CASE_VALUES_THRESHOLD;
9902 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9903 0 otherwise (i.e. if there is no casesi instruction). */
9904 int
9905 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9906 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
9907 rtx fallback_label ATTRIBUTE_UNUSED)
9909 enum machine_mode index_mode = SImode;
9910 int index_bits = GET_MODE_BITSIZE (index_mode);
9911 rtx op1, op2, index;
9912 enum machine_mode op_mode;
9914 if (! HAVE_casesi)
9915 return 0;
9917 /* Convert the index to SImode. */
9918 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9920 enum machine_mode omode = TYPE_MODE (index_type);
9921 rtx rangertx = expand_normal (range);
9923 /* We must handle the endpoints in the original mode. */
9924 index_expr = build2 (MINUS_EXPR, index_type,
9925 index_expr, minval);
9926 minval = integer_zero_node;
9927 index = expand_normal (index_expr);
9929 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9930 omode, 1, default_label);
9931 /* Now we can safely truncate. */
9932 index = convert_to_mode (index_mode, index, 0);
9936 if (TYPE_MODE (index_type) != index_mode)
9938 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9939 index_expr = fold_convert (index_type, index_expr);
9942 index = expand_normal (index_expr);
9945 do_pending_stack_adjust ();
9947 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9948 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9949 (index, op_mode))
9950 index = copy_to_mode_reg (op_mode, index);
9952 op1 = expand_normal (minval);
9954 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9955 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9956 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9957 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9958 (op1, op_mode))
9959 op1 = copy_to_mode_reg (op_mode, op1);
9961 op2 = expand_normal (range);
9963 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9964 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9965 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9966 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9967 (op2, op_mode))
9968 op2 = copy_to_mode_reg (op_mode, op2);
9970 emit_jump_insn (gen_casesi (index, op1, op2,
9971 table_label, !default_label
9972 ? fallback_label : default_label));
9973 return 1;
9976 /* Attempt to generate a tablejump instruction; same concept. */
9977 #ifndef HAVE_tablejump
9978 #define HAVE_tablejump 0
9979 #define gen_tablejump(x, y) (0)
9980 #endif
9982 /* Subroutine of the next function.
9984 INDEX is the value being switched on, with the lowest value
9985 in the table already subtracted.
9986 MODE is its expected mode (needed if INDEX is constant).
9987 RANGE is the length of the jump table.
9988 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9990 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9991 index value is out of range. */
9993 static void
9994 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9995 rtx default_label)
9999 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10000 cfun->cfg->max_jumptable_ents = INTVAL (range);
10002 /* Do an unsigned comparison (in the proper mode) between the index
10003 expression and the value which represents the length of the range.
10004 Since we just finished subtracting the lower bound of the range
10005 from the index expression, this comparison allows us to simultaneously
10006 check that the original index expression value is both greater than
10007 or equal to the minimum value of the range and less than or equal to
10008 the maximum value of the range. */
10011 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10012 default_label);
10014 /* If index is in range, it must fit in Pmode.
10015 Convert to Pmode so we can index with it. */
10016 if (mode != Pmode)
10017 index = convert_to_mode (Pmode, index, 1);
10019 /* Don't let a MEM slip through, because then INDEX that comes
10020 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10021 and break_out_memory_refs will go to work on it and mess it up. */
10022 #ifdef PIC_CASE_VECTOR_ADDRESS
10023 if (flag_pic && !REG_P (index))
10024 index = copy_to_mode_reg (Pmode, index);
10027 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10028 GET_MODE_SIZE, because this indicates how large insns are. The other
10029 uses should all be Pmode, because they are addresses. This code
10030 could fail if addresses and insns are not the same size. */
10031 index = gen_rtx_PLUS (Pmode,
10032 gen_rtx_MULT (Pmode, index,
10033 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10034 gen_rtx_LABEL_REF (Pmode, table_label));
10035 #ifdef PIC_CASE_VECTOR_ADDRESS
10036 if (flag_pic)
10037 index = PIC_CASE_VECTOR_ADDRESS (index);
10038 else
10039 #endif
10040 index = memory_address (CASE_VECTOR_MODE, index);
10041 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10042 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10043 convert_move (temp, vector, 0);
10045 emit_jump_insn (gen_tablejump (temp, table_label));
10047 /* If we are generating PIC code or if the table is PC-relative, the
10048 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10049 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10050 emit_barrier ();
10053 int
10054 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10055 rtx table_label, rtx default_label)
10059 if (! HAVE_tablejump)
10060 return 0;
10062 index_expr = fold_build2 (MINUS_EXPR, index_type,
10063 fold_convert (index_type, index_expr),
10064 fold_convert (index_type, minval));
10065 index = expand_normal (index_expr);
10066 do_pending_stack_adjust ();
10068 do_tablejump (index, TYPE_MODE (index_type),
10069 convert_modes (TYPE_MODE (index_type),
10070 TYPE_MODE (TREE_TYPE (range)),
10071 expand_normal (range),
10072 TYPE_UNSIGNED (TREE_TYPE (range))),
10073 table_label, default_label);
10074 return 1;
10077 /* Nonzero if the mode is a valid vector mode for this architecture.
10078 This returns nonzero even if there is no hardware support for the
10079 vector mode, but we can emulate with narrower modes. */
10081 int
10082 vector_mode_valid_p (enum machine_mode mode)
10084 enum mode_class mclass = GET_MODE_CLASS (mode);
10085 enum machine_mode innermode;
10087 /* Doh! What's going on? */
10088 if (mclass != MODE_VECTOR_INT
10089 && mclass != MODE_VECTOR_FLOAT
10090 && mclass != MODE_VECTOR_FRACT
10091 && mclass != MODE_VECTOR_UFRACT
10092 && mclass != MODE_VECTOR_ACCUM
10093 && mclass != MODE_VECTOR_UACCUM)
10094 return 0;
10096 /* Hardware support. Woo hoo! */
10097 if (targetm.vector_mode_supported_p (mode))
10098 return 1;
10100 innermode = GET_MODE_INNER (mode);
10102 /* We should probably return 1 if requesting V4DI and we have no DI,
10103 but do have V2DI; that case, however, is probably very unlikely. */
10105 /* If we have support for the inner mode, we can safely emulate it.
10106 We may not have V2DI, but we can emulate with a pair of DIs. */
10107 return targetm.scalar_mode_supported_p (innermode);
10110 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10111 static rtx
10112 const_vector_from_tree (tree exp)
10117 enum machine_mode inner, mode;
10119 mode = TYPE_MODE (TREE_TYPE (exp));
10121 if (initializer_zerop (exp))
10122 return CONST0_RTX (mode);
10124 units = GET_MODE_NUNITS (mode);
10125 inner = GET_MODE_INNER (mode);
10127 v = rtvec_alloc (units);
10129 link = TREE_VECTOR_CST_ELTS (exp);
10130 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10132 elt = TREE_VALUE (link);
10134 if (TREE_CODE (elt) == REAL_CST)
10135 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10137 else if (TREE_CODE (elt) == FIXED_CST)
10138 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10141 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10142 TREE_INT_CST_HIGH (elt),
10146 /* Initialize remaining elements to 0. */
10147 for (; i < units; ++i)
10148 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10150 return gen_rtx_CONST_VECTOR (mode, v);
10152 #include "gt-expr.h"