1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
24 #include "coretypes.h"
31 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
43 #include "typeclass.h"
45 #include "langhooks.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
52 #include "common/common-target.h"
55 #include "diagnostic.h"
56 #include "ssaexpand.h"
57 #include "target-globals.h"
59 /* Decide whether a function's arguments should be processed
60 from first to last or from last to first.
62 They should if the stack and args grow in opposite directions, but
63 only if we have push insns. */
67 #ifndef PUSH_ARGS_REVERSED
68 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
69 #define PUSH_ARGS_REVERSED /* If it's last to first. */
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
79 #define STACK_PUSH_CODE PRE_INC
84 /* If this is nonzero, we do not bother generating VOLATILE
85 around volatile memory references, and we are willing to
86 output indirect addresses. If cse is to follow, we reject
87 indirect addresses so a useful potential cse is generated;
88 if it is used only once, instruction combination will produce
89 the same indirect address eventually. */
92 /* This structure is used by move_by_pieces to describe the move to
94 struct move_by_pieces_d
103 int explicit_inc_from;
104 unsigned HOST_WIDE_INT len;
105 HOST_WIDE_INT offset;
109 /* This structure is used by store_by_pieces to describe the clear to
112 struct store_by_pieces_d
118 unsigned HOST_WIDE_INT len;
119 HOST_WIDE_INT offset;
120 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
125 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
128 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
129 struct move_by_pieces_d *);
130 static bool block_move_libcall_safe_for_call_parm (void);
131 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
132 static tree emit_block_move_libcall_fn (int);
133 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
134 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
135 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
136 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
137 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
138 struct store_by_pieces_d *);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, alias_set_type);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 tree, tree, alias_set_type, bool);
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
163 /* This macro is used to determine whether move_by_pieces should be called
164 to perform a structure copy. */
165 #ifndef MOVE_BY_PIECES_P
166 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
167 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
168 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
171 /* This macro is used to determine whether clear_by_pieces should be
172 called to clear storage. */
173 #ifndef CLEAR_BY_PIECES_P
174 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
175 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
176 < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
179 /* This macro is used to determine whether store_by_pieces should be
180 called to "memset" storage with byte values other than zero. */
181 #ifndef SET_BY_PIECES_P
182 #define SET_BY_PIECES_P(SIZE, ALIGN) \
183 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
184 < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
187 /* This macro is used to determine whether store_by_pieces should be
188 called to "memcpy" storage when the source is a constant string. */
189 #ifndef STORE_BY_PIECES_P
190 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
191 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
192 < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
195 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
197 #ifndef SLOW_UNALIGNED_ACCESS
198 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
201 /* This is run to set up which modes can be used
202 directly in memory and to initialize the block move optab. It is run
203 at the beginning of compilation and when the target is reinitialized. */
206 init_expr_target (void)
209 enum machine_mode mode;
214 /* Try indexing by frame ptr and try by stack ptr.
215 It is known that on the Convex the stack ptr isn't a valid index.
216 With luck, one or the other is valid on any machine. */
217 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
218 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
220 /* A scratch register we can modify in-place below to avoid
221 useless RTL allocations. */
222 reg = gen_rtx_REG (VOIDmode, -1);
224 insn = rtx_alloc (INSN);
225 pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
226 PATTERN (insn) = pat;
228 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
229 mode = (enum machine_mode) ((int) mode + 1))
233 direct_load[(int) mode] = direct_store[(int) mode] = 0;
234 PUT_MODE (mem, mode);
235 PUT_MODE (mem1, mode);
236 PUT_MODE (reg, mode);
238 /* See if there is some register that can be used in this mode and
239 directly loaded or stored from memory. */
241 if (mode != VOIDmode && mode != BLKmode)
242 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
243 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
246 if (! HARD_REGNO_MODE_OK (regno, mode))
249 SET_REGNO (reg, regno);
252 SET_DEST (pat) = reg;
253 if (recog (pat, insn, &num_clobbers) >= 0)
254 direct_load[(int) mode] = 1;
256 SET_SRC (pat) = mem1;
257 SET_DEST (pat) = reg;
258 if (recog (pat, insn, &num_clobbers) >= 0)
259 direct_load[(int) mode] = 1;
262 SET_DEST (pat) = mem;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_store[(int) mode] = 1;
267 SET_DEST (pat) = mem1;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_store[(int) mode] = 1;
273 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
275 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
276 mode = GET_MODE_WIDER_MODE (mode))
278 enum machine_mode srcmode;
279 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
280 srcmode = GET_MODE_WIDER_MODE (srcmode))
284 ic = can_extend_p (mode, srcmode, 0);
285 if (ic == CODE_FOR_nothing)
288 PUT_MODE (mem, srcmode);
290 if (insn_operand_matches (ic, 1, mem))
291 float_extend_from_mem[mode][srcmode] = true;
296 /* This is run at the start of compiling a function. */
301 memset (&crtl->expr, 0, sizeof (crtl->expr));
304 /* Copy data from FROM to TO, where the machine modes are not the same.
305 Both modes may be integer, or both may be floating, or both may be
307 UNSIGNEDP should be nonzero if FROM is an unsigned type.
308 This causes zero-extension instead of sign-extension. */
311 convert_move (rtx to, rtx from, int unsignedp)
313 enum machine_mode to_mode = GET_MODE (to);
314 enum machine_mode from_mode = GET_MODE (from);
315 int to_real = SCALAR_FLOAT_MODE_P (to_mode);
316 int from_real = SCALAR_FLOAT_MODE_P (from_mode);
320 /* rtx code for making an equivalent value. */
321 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
322 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
325 gcc_assert (to_real == from_real);
326 gcc_assert (to_mode != BLKmode);
327 gcc_assert (from_mode != BLKmode);
329 /* If the source and destination are already the same, then there's
334 /* If FROM is a SUBREG that indicates that we have already done at least
335 the required extension, strip it. We don't handle such SUBREGs as
338 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
339 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
340 >= GET_MODE_SIZE (to_mode))
341 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
342 from = gen_lowpart (to_mode, from), from_mode = to_mode;
344 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
346 if (to_mode == from_mode
347 || (from_mode == VOIDmode && CONSTANT_P (from)))
349 emit_move_insn (to, from);
353 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
355 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
357 if (VECTOR_MODE_P (to_mode))
358 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
360 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
362 emit_move_insn (to, from);
366 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
368 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
369 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
378 gcc_assert ((GET_MODE_PRECISION (from_mode)
379 != GET_MODE_PRECISION (to_mode))
380 || (DECIMAL_FLOAT_MODE_P (from_mode)
381 != DECIMAL_FLOAT_MODE_P (to_mode)));
383 if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
384 /* Conversion between decimal float and binary float, same size. */
385 tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
386 else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
391 /* Try converting directly if the insn is supported. */
393 code = convert_optab_handler (tab, to_mode, from_mode);
394 if (code != CODE_FOR_nothing)
396 emit_unop_insn (code, to, from,
397 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
401 /* Otherwise use a libcall. */
402 libcall = convert_optab_libfunc (tab, to_mode, from_mode);
404 /* Is this conversion implemented yet? */
405 gcc_assert (libcall);
408 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
410 insns = get_insns ();
412 emit_libcall_block (insns, to, value,
413 tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
415 : gen_rtx_FLOAT_EXTEND (to_mode, from));
419 /* Handle pointer conversion. */ /* SPEE 900220. */
420 /* Targets are expected to provide conversion insns between PxImode and
421 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
422 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
424 enum machine_mode full_mode
425 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
427 gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
428 != CODE_FOR_nothing);
430 if (full_mode != from_mode)
431 from = convert_to_mode (full_mode, from, unsignedp);
432 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
436 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
439 enum machine_mode full_mode
440 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
442 gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
443 != CODE_FOR_nothing);
445 if (to_mode == full_mode)
447 emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
453 new_from = gen_reg_rtx (full_mode);
454 emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
455 new_from, from, UNKNOWN);
457 /* else proceed to integer conversions below. */
458 from_mode = full_mode;
462 /* Make sure both are fixed-point modes or both are not. */
463 gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
464 ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
465 if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
467 /* If we widen from_mode to to_mode and they are in the same class,
468 we won't saturate the result.
469 Otherwise, always saturate the result to play safe. */
470 if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
471 && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
472 expand_fixed_convert (to, from, 0, 0);
474 expand_fixed_convert (to, from, 0, 1);
478 /* Now both modes are integers. */
480 /* Handle expanding beyond a word. */
481 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
482 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
489 enum machine_mode lowpart_mode;
490 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
492 /* Try converting directly if the insn is supported. */
493 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
496 /* If FROM is a SUBREG, put it into a register. Do this
497 so that we always generate the same set of insns for
498 better cse'ing; if an intermediate assignment occurred,
499 we won't be doing the operation directly on the SUBREG. */
500 if (optimize > 0 && GET_CODE (from) == SUBREG)
501 from = force_reg (from_mode, from);
502 emit_unop_insn (code, to, from, equiv_code);
505 /* Next, try converting via full word. */
506 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
507 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
508 != CODE_FOR_nothing))
510 rtx word_to = gen_reg_rtx (word_mode);
513 if (reg_overlap_mentioned_p (to, from))
514 from = force_reg (from_mode, from);
517 convert_move (word_to, from, unsignedp);
518 emit_unop_insn (code, to, word_to, equiv_code);
522 /* No special multiword conversion insn; do it by hand. */
525 /* Since we will turn this into a no conflict block, we must ensure
526 that the source does not overlap the target. */
528 if (reg_overlap_mentioned_p (to, from))
529 from = force_reg (from_mode, from);
531 /* Get a copy of FROM widened to a word, if necessary. */
532 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
533 lowpart_mode = word_mode;
535 lowpart_mode = from_mode;
537 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
539 lowpart = gen_lowpart (lowpart_mode, to);
540 emit_move_insn (lowpart, lowfrom);
542 /* Compute the value to put in each remaining word. */
544 fill_value = const0_rtx;
546 fill_value = emit_store_flag (gen_reg_rtx (word_mode),
547 LT, lowfrom, const0_rtx,
550 /* Fill the remaining words. */
551 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
553 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
554 rtx subword = operand_subword (to, index, 1, to_mode);
556 gcc_assert (subword);
558 if (fill_value != subword)
559 emit_move_insn (subword, fill_value);
562 insns = get_insns ();
569 /* Truncating multi-word to a word or less. */
570 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
571 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
574 && ! MEM_VOLATILE_P (from)
575 && direct_load[(int) to_mode]
576 && ! mode_dependent_address_p (XEXP (from, 0)))
578 || GET_CODE (from) == SUBREG))
579 from = force_reg (from_mode, from);
580 convert_move (to, gen_lowpart (word_mode, from), 0);
584 /* Now follow all the conversions between integers
585 no more than a word long. */
587 /* For truncation, usually we can just refer to FROM in a narrower mode. */
588 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
589 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
590 GET_MODE_BITSIZE (from_mode)))
593 && ! MEM_VOLATILE_P (from)
594 && direct_load[(int) to_mode]
595 && ! mode_dependent_address_p (XEXP (from, 0)))
597 || GET_CODE (from) == SUBREG))
598 from = force_reg (from_mode, from);
599 if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
600 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
601 from = copy_to_reg (from);
602 emit_move_insn (to, gen_lowpart (to_mode, from));
606 /* Handle extension. */
607 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
609 /* Convert directly if that works. */
610 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
613 emit_unop_insn (code, to, from, equiv_code);
618 enum machine_mode intermediate;
622 /* Search for a mode to convert via. */
623 for (intermediate = from_mode; intermediate != VOIDmode;
624 intermediate = GET_MODE_WIDER_MODE (intermediate))
625 if (((can_extend_p (to_mode, intermediate, unsignedp)
627 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
628 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
629 GET_MODE_BITSIZE (intermediate))))
630 && (can_extend_p (intermediate, from_mode, unsignedp)
631 != CODE_FOR_nothing))
633 convert_move (to, convert_to_mode (intermediate, from,
634 unsignedp), unsignedp);
638 /* No suitable intermediate mode.
639 Generate what we need with shifts. */
640 shift_amount = (GET_MODE_BITSIZE (to_mode)
641 - GET_MODE_BITSIZE (from_mode));
642 from = gen_lowpart (to_mode, force_reg (from_mode, from));
643 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
645 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
648 emit_move_insn (to, tmp);
653 /* Support special truncate insns for certain modes. */
654 if (convert_optab_handler (trunc_optab, to_mode,
655 from_mode) != CODE_FOR_nothing)
657 emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
662 /* Handle truncation of volatile memrefs, and so on;
663 the things that couldn't be truncated directly,
664 and for which there was no special instruction.
666 ??? Code above formerly short-circuited this, for most integer
667 mode pairs, with a force_reg in from_mode followed by a recursive
668 call to this routine. Appears always to have been wrong. */
669 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
671 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
672 emit_move_insn (to, temp);
676 /* Mode combination is not recognized. */
680 /* Return an rtx for a value that would result
681 from converting X to mode MODE.
682 Both X and MODE may be floating, or both integer.
683 UNSIGNEDP is nonzero if X is an unsigned value.
684 This can be done by referring to a part of X in place
685 or by copying to a new temporary with conversion. */
688 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
690 return convert_modes (mode, VOIDmode, x, unsignedp);
693 /* Return an rtx for a value that would result
694 from converting X from mode OLDMODE to mode MODE.
695 Both modes may be floating, or both integer.
696 UNSIGNEDP is nonzero if X is an unsigned value.
698 This can be done by referring to a part of X in place
699 or by copying to a new temporary with conversion.
701 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
704 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
708 /* If FROM is a SUBREG that indicates that we have already done at least
709 the required extension, strip it. */
711 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
712 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
713 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
714 x = gen_lowpart (mode, x);
716 if (GET_MODE (x) != VOIDmode)
717 oldmode = GET_MODE (x);
722 /* There is one case that we must handle specially: If we are converting
723 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
724 we are to interpret the constant as unsigned, gen_lowpart will do
725 the wrong if the constant appears negative. What we want to do is
726 make the high-order word of the constant zero, not all ones. */
728 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
729 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
730 && CONST_INT_P (x) && INTVAL (x) < 0)
732 double_int val = uhwi_to_double_int (INTVAL (x));
734 /* We need to zero extend VAL. */
735 if (oldmode != VOIDmode)
736 val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));
738 return immed_double_int_const (val, mode);
741 /* We can do this with a gen_lowpart if both desired and current modes
742 are integer, and this is either a constant integer, a register, or a
743 non-volatile MEM. Except for the constant case where MODE is no
744 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
747 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
748 || (GET_MODE_CLASS (mode) == MODE_INT
749 && GET_MODE_CLASS (oldmode) == MODE_INT
750 && (GET_CODE (x) == CONST_DOUBLE
751 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
752 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
753 && direct_load[(int) mode])
755 && (! HARD_REGISTER_P (x)
756 || HARD_REGNO_MODE_OK (REGNO (x), mode))
757 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
758 GET_MODE_BITSIZE (GET_MODE (x)))))))))
760 /* ?? If we don't know OLDMODE, we have to assume here that
761 X does not need sign- or zero-extension. This may not be
762 the case, but it's the best we can do. */
763 if (CONST_INT_P (x) && oldmode != VOIDmode
764 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
766 HOST_WIDE_INT val = INTVAL (x);
767 int width = GET_MODE_BITSIZE (oldmode);
769 /* We must sign or zero-extend in this case. Start by
770 zero-extending, then sign extend if we need to. */
771 val &= ((HOST_WIDE_INT) 1 << width) - 1;
773 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
774 val |= (HOST_WIDE_INT) (-1) << width;
776 return gen_int_mode (val, mode);
779 return gen_lowpart (mode, x);
782 /* Converting from integer constant into mode is always equivalent to an
784 if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
786 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
787 return simplify_gen_subreg (mode, x, oldmode, 0);
790 temp = gen_reg_rtx (mode);
791 convert_move (temp, x, unsignedp);
795 /* Return the largest alignment we can use for doing a move (or store)
796 of MAX_PIECES. ALIGN is the largest alignment we could use. */
799 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
801 enum machine_mode tmode;
803 tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
804 if (align >= GET_MODE_ALIGNMENT (tmode))
805 align = GET_MODE_ALIGNMENT (tmode);
808 enum machine_mode tmode, xmode;
810 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
812 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
813 if (GET_MODE_SIZE (tmode) > max_pieces
814 || SLOW_UNALIGNED_ACCESS (tmode, align))
817 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
823 /* Return the widest integer mode no wider than SIZE. If no such mode
824 can be found, return VOIDmode. */
826 static enum machine_mode
827 widest_int_mode_for_size (unsigned int size)
829 enum machine_mode tmode, mode = VOIDmode;
831 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
832 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
833 if (GET_MODE_SIZE (tmode) < size)
839 /* STORE_MAX_PIECES is the number of bytes at a time that we can
840 store efficiently. Due to internal GCC limitations, this is
841 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
842 for an immediate constant. */
844 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
846 /* Determine whether the LEN bytes can be moved by using several move
847 instructions. Return nonzero if a call to move_by_pieces should
851 can_move_by_pieces (unsigned HOST_WIDE_INT len,
852 unsigned int align ATTRIBUTE_UNUSED)
854 return MOVE_BY_PIECES_P (len, align);
857 /* Generate several move instructions to copy LEN bytes from block FROM to
858 block TO. (These are MEM rtx's with BLKmode).
860 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
861 used to push FROM to the stack.
863 ALIGN is maximum stack alignment we can assume.
865 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
866 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
870 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
871 unsigned int align, int endp)
873 struct move_by_pieces_d data;
874 enum machine_mode to_addr_mode, from_addr_mode
875 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
876 rtx to_addr, from_addr = XEXP (from, 0);
877 unsigned int max_size = MOVE_MAX_PIECES + 1;
878 enum insn_code icode;
880 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
883 data.from_addr = from_addr;
886 to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
887 to_addr = XEXP (to, 0);
890 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
891 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
893 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
897 to_addr_mode = VOIDmode;
901 #ifdef STACK_GROWS_DOWNWARD
907 data.to_addr = to_addr;
910 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
911 || GET_CODE (from_addr) == POST_INC
912 || GET_CODE (from_addr) == POST_DEC);
914 data.explicit_inc_from = 0;
915 data.explicit_inc_to = 0;
916 if (data.reverse) data.offset = len;
919 /* If copying requires more than two move insns,
920 copy addresses to registers (to make displacements shorter)
921 and use post-increment if available. */
922 if (!(data.autinc_from && data.autinc_to)
923 && move_by_pieces_ninsns (len, align, max_size) > 2)
925 /* Find the mode of the largest move...
926 MODE might not be used depending on the definitions of the
927 USE_* macros below. */
928 enum machine_mode mode ATTRIBUTE_UNUSED
929 = widest_int_mode_for_size (max_size);
931 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
933 data.from_addr = copy_to_mode_reg (from_addr_mode,
934 plus_constant (from_addr, len));
935 data.autinc_from = 1;
936 data.explicit_inc_from = -1;
938 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
940 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
941 data.autinc_from = 1;
942 data.explicit_inc_from = 1;
944 if (!data.autinc_from && CONSTANT_P (from_addr))
945 data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
946 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
948 data.to_addr = copy_to_mode_reg (to_addr_mode,
949 plus_constant (to_addr, len));
951 data.explicit_inc_to = -1;
953 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
955 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
957 data.explicit_inc_to = 1;
959 if (!data.autinc_to && CONSTANT_P (to_addr))
960 data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
963 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
965 /* First move what we can in the largest integer mode, then go to
966 successively smaller modes. */
970 enum machine_mode mode = widest_int_mode_for_size (max_size);
972 if (mode == VOIDmode)
975 icode = optab_handler (mov_optab, mode);
976 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
977 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
979 max_size = GET_MODE_SIZE (mode);
982 /* The code above should have handled everything. */
983 gcc_assert (!data.len);
989 gcc_assert (!data.reverse);
994 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
995 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
997 data.to_addr = copy_to_mode_reg (to_addr_mode,
998 plus_constant (data.to_addr,
1001 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1008 to1 = adjust_address (data.to, QImode, data.offset);
1016 /* Return number of insns required to move L bytes by pieces.
1017 ALIGN (in bits) is maximum alignment we can assume. */
1019 static unsigned HOST_WIDE_INT
1020 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1021 unsigned int max_size)
1023 unsigned HOST_WIDE_INT n_insns = 0;
1025 align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1027 while (max_size > 1)
1029 enum machine_mode mode;
1030 enum insn_code icode;
1032 mode = widest_int_mode_for_size (max_size);
1034 if (mode == VOIDmode)
1037 icode = optab_handler (mov_optab, mode);
1038 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1039 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1041 max_size = GET_MODE_SIZE (mode);
1048 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1049 with move instructions for mode MODE. GENFUN is the gen_... function
1050 to make a move insn for that mode. DATA has all the other info. */
1053 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1054 struct move_by_pieces_d *data)
1056 unsigned int size = GET_MODE_SIZE (mode);
1057 rtx to1 = NULL_RTX, from1;
1059 while (data->len >= size)
1062 data->offset -= size;
1066 if (data->autinc_to)
1067 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1070 to1 = adjust_address (data->to, mode, data->offset);
1073 if (data->autinc_from)
1074 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1077 from1 = adjust_address (data->from, mode, data->offset);
1079 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1080 emit_insn (gen_add2_insn (data->to_addr,
1081 GEN_INT (-(HOST_WIDE_INT)size)));
1082 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1083 emit_insn (gen_add2_insn (data->from_addr,
1084 GEN_INT (-(HOST_WIDE_INT)size)));
1087 emit_insn ((*genfun) (to1, from1));
1090 #ifdef PUSH_ROUNDING
1091 emit_single_push_insn (mode, from1, NULL);
1097 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1098 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1099 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1100 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1102 if (! data->reverse)
1103 data->offset += size;
1109 /* Emit code to move a block Y to a block X. This may be done with
1110 string-move instructions, with multiple scalar move instructions,
1111 or with a library call.
1113 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1114 SIZE is an rtx that says how long they are.
1115 ALIGN is the maximum alignment we can assume they have.
1116 METHOD describes what kind of copy this is, and what mechanisms may be used.
1118 Return the address of the new block, if memcpy is called and returns it,
1122 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1123 unsigned int expected_align, HOST_WIDE_INT expected_size)
1130 if (CONST_INT_P (size)
1131 && INTVAL (size) == 0)
1136 case BLOCK_OP_NORMAL:
1137 case BLOCK_OP_TAILCALL:
1138 may_use_call = true;
1141 case BLOCK_OP_CALL_PARM:
1142 may_use_call = block_move_libcall_safe_for_call_parm ();
1144 /* Make inhibit_defer_pop nonzero around the library call
1145 to force it to pop the arguments right away. */
1149 case BLOCK_OP_NO_LIBCALL:
1150 may_use_call = false;
1157 gcc_assert (MEM_P (x) && MEM_P (y));
1158 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1159 gcc_assert (align >= BITS_PER_UNIT);
1161 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1162 block copy is more efficient for other large modes, e.g. DCmode. */
1163 x = adjust_address (x, BLKmode, 0);
1164 y = adjust_address (y, BLKmode, 0);
1166 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1167 can be incorrect is coming from __builtin_memcpy. */
1168 if (CONST_INT_P (size))
1170 x = shallow_copy_rtx (x);
1171 y = shallow_copy_rtx (y);
1172 set_mem_size (x, size);
1173 set_mem_size (y, size);
1176 if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1177 move_by_pieces (x, y, INTVAL (size), align, 0);
1178 else if (emit_block_move_via_movmem (x, y, size, align,
1179 expected_align, expected_size))
1181 else if (may_use_call
1182 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1183 && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1185 /* Since x and y are passed to a libcall, mark the corresponding
1186 tree EXPR as addressable. */
1187 tree y_expr = MEM_EXPR (y);
1188 tree x_expr = MEM_EXPR (x);
1190 mark_addressable (y_expr);
1192 mark_addressable (x_expr);
1193 retval = emit_block_move_via_libcall (x, y, size,
1194 method == BLOCK_OP_TAILCALL);
1198 emit_block_move_via_loop (x, y, size, align);
1200 if (method == BLOCK_OP_CALL_PARM)
1207 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1209 return emit_block_move_hints (x, y, size, method, 0, -1);
1212 /* A subroutine of emit_block_move. Returns true if calling the
1213 block move libcall will not clobber any parameters which may have
1214 already been placed on the stack. */
1217 block_move_libcall_safe_for_call_parm (void)
1219 #if defined (REG_PARM_STACK_SPACE)
1223 /* If arguments are pushed on the stack, then they're safe. */
1227 /* If registers go on the stack anyway, any argument is sure to clobber
1228 an outgoing argument. */
1229 #if defined (REG_PARM_STACK_SPACE)
1230 fn = emit_block_move_libcall_fn (false);
1231 /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1232 depend on its argument. */
1234 if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1235 && REG_PARM_STACK_SPACE (fn) != 0)
1239 /* If any argument goes in memory, then it might clobber an outgoing
1242 CUMULATIVE_ARGS args_so_far_v;
1243 cumulative_args_t args_so_far;
1246 fn = emit_block_move_libcall_fn (false);
1247 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1248 args_so_far = pack_cumulative_args (&args_so_far_v);
1250 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1251 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1253 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1254 rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1256 if (!tmp || !REG_P (tmp))
1258 if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1260 targetm.calls.function_arg_advance (args_so_far, mode,
1267 /* A subroutine of emit_block_move. Expand a movmem pattern;
1268 return true if successful. */
1271 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1272 unsigned int expected_align, HOST_WIDE_INT expected_size)
1274 int save_volatile_ok = volatile_ok;
1275 enum machine_mode mode;
1277 if (expected_align < align)
1278 expected_align = align;
1280 /* Since this is a move insn, we don't care about volatility. */
1283 /* Try the most limited insn first, because there's no point
1284 including more than one in the machine description unless
1285 the more limited one has some advantage. */
1287 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1288 mode = GET_MODE_WIDER_MODE (mode))
1290 enum insn_code code = direct_optab_handler (movmem_optab, mode);
1292 if (code != CODE_FOR_nothing
1293 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1294 here because if SIZE is less than the mode mask, as it is
1295 returned by the macro, it will definitely be less than the
1296 actual mode mask. */
1297 && ((CONST_INT_P (size)
1298 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1299 <= (GET_MODE_MASK (mode) >> 1)))
1300 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1302 struct expand_operand ops[6];
1305 /* ??? When called via emit_block_move_for_call, it'd be
1306 nice if there were some way to inform the backend, so
1307 that it doesn't fail the expansion because it thinks
1308 emitting the libcall would be more efficient. */
1309 nops = insn_data[(int) code].n_generator_args;
1310 gcc_assert (nops == 4 || nops == 6);
1312 create_fixed_operand (&ops[0], x);
1313 create_fixed_operand (&ops[1], y);
1314 /* The check above guarantees that this size conversion is valid. */
1315 create_convert_operand_to (&ops[2], size, mode, true);
1316 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1319 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1320 create_integer_operand (&ops[5], expected_size);
1322 if (maybe_expand_insn (code, nops, ops))
1324 volatile_ok = save_volatile_ok;
1330 volatile_ok = save_volatile_ok;
1334 /* A subroutine of emit_block_move. Expand a call to memcpy.
1335 Return the return value from memcpy, 0 otherwise. */
1338 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1340 rtx dst_addr, src_addr;
1341 tree call_expr, fn, src_tree, dst_tree, size_tree;
1342 enum machine_mode size_mode;
1345 /* Emit code to copy the addresses of DST and SRC and SIZE into new
1346 pseudos. We can then place those new pseudos into a VAR_DECL and
1349 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1350 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1352 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1353 src_addr = convert_memory_address (ptr_mode, src_addr);
1355 dst_tree = make_tree (ptr_type_node, dst_addr);
1356 src_tree = make_tree (ptr_type_node, src_addr);
1358 size_mode = TYPE_MODE (sizetype);
1360 size = convert_to_mode (size_mode, size, 1);
1361 size = copy_to_mode_reg (size_mode, size);
1363 /* It is incorrect to use the libcall calling conventions to call
1364 memcpy in this context. This could be a user call to memcpy and
1365 the user may wish to examine the return value from memcpy. For
1366 targets where libcalls and normal calls have different conventions
1367 for returning pointers, we could end up generating incorrect code. */
1369 size_tree = make_tree (sizetype, size);
1371 fn = emit_block_move_libcall_fn (true);
1372 call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1373 CALL_EXPR_TAILCALL (call_expr) = tailcall;
1375 retval = expand_normal (call_expr);
1380 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1381 for the function we use for block copies. The first time FOR_CALL
1382 is true, we call assemble_external. */
1384 static GTY(()) tree block_move_fn;
1387 init_block_move_fn (const char *asmspec)
1393 fn = get_identifier ("memcpy");
1394 args = build_function_type_list (ptr_type_node, ptr_type_node,
1395 const_ptr_type_node, sizetype,
1398 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1399 DECL_EXTERNAL (fn) = 1;
1400 TREE_PUBLIC (fn) = 1;
1401 DECL_ARTIFICIAL (fn) = 1;
1402 TREE_NOTHROW (fn) = 1;
1403 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1404 DECL_VISIBILITY_SPECIFIED (fn) = 1;
1410 set_user_assembler_name (block_move_fn, asmspec);
1414 emit_block_move_libcall_fn (int for_call)
1416 static bool emitted_extern;
1419 init_block_move_fn (NULL);
1421 if (for_call && !emitted_extern)
1423 emitted_extern = true;
1424 make_decl_rtl (block_move_fn);
1425 assemble_external (block_move_fn);
1428 return block_move_fn;
1431 /* A subroutine of emit_block_move. Copy the data via an explicit
1432 loop. This is used only when libcalls are forbidden. */
1433 /* ??? It'd be nice to copy in hunks larger than QImode. */
1436 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1437 unsigned int align ATTRIBUTE_UNUSED)
1439 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1440 enum machine_mode x_addr_mode
1441 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1442 enum machine_mode y_addr_mode
1443 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1444 enum machine_mode iter_mode;
1446 iter_mode = GET_MODE (size);
1447 if (iter_mode == VOIDmode)
1448 iter_mode = word_mode;
1450 top_label = gen_label_rtx ();
1451 cmp_label = gen_label_rtx ();
1452 iter = gen_reg_rtx (iter_mode);
1454 emit_move_insn (iter, const0_rtx);
1456 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1457 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1458 do_pending_stack_adjust ();
1460 emit_jump (cmp_label);
1461 emit_label (top_label);
1463 tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1464 x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1466 if (x_addr_mode != y_addr_mode)
1467 tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1468 y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1470 x = change_address (x, QImode, x_addr);
1471 y = change_address (y, QImode, y_addr);
1473 emit_move_insn (x, y);
1475 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1476 true, OPTAB_LIB_WIDEN);
1478 emit_move_insn (iter, tmp);
1480 emit_label (cmp_label);
1482 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1486 /* Copy all or part of a value X into registers starting at REGNO.
1487 The number of registers to be filled is NREGS. */
1490 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1493 #ifdef HAVE_load_multiple
1501 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1502 x = validize_mem (force_const_mem (mode, x));
1504 /* See if the machine can do this with a load multiple insn. */
1505 #ifdef HAVE_load_multiple
1506 if (HAVE_load_multiple)
1508 last = get_last_insn ();
1509 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1517 delete_insns_since (last);
1521 for (i = 0; i < nregs; i++)
1522 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1523 operand_subword_force (x, i, mode));
1526 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1527 The number of registers to be filled is NREGS. */
1530 move_block_from_reg (int regno, rtx x, int nregs)
1537 /* See if the machine can do this with a store multiple insn. */
1538 #ifdef HAVE_store_multiple
1539 if (HAVE_store_multiple)
1541 rtx last = get_last_insn ();
1542 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1550 delete_insns_since (last);
1554 for (i = 0; i < nregs; i++)
1556 rtx tem = operand_subword (x, i, 1, BLKmode);
1560 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1564 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1565 ORIG, where ORIG is a non-consecutive group of registers represented by
1566 a PARALLEL. The clone is identical to the original except in that the
1567 original set of registers is replaced by a new set of pseudo registers.
1568 The new set has the same modes as the original set. */
1571 gen_group_rtx (rtx orig)
1576 gcc_assert (GET_CODE (orig) == PARALLEL);
1578 length = XVECLEN (orig, 0);
1579 tmps = XALLOCAVEC (rtx, length);
1581 /* Skip a NULL entry in first slot. */
1582 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1587 for (; i < length; i++)
1589 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1590 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1592 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1595 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1598 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1599 except that values are placed in TMPS[i], and must later be moved
1600 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1603 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1607 enum machine_mode m = GET_MODE (orig_src);
1609 gcc_assert (GET_CODE (dst) == PARALLEL);
1612 && !SCALAR_INT_MODE_P (m)
1613 && !MEM_P (orig_src)
1614 && GET_CODE (orig_src) != CONCAT)
1616 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1617 if (imode == BLKmode)
1618 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1620 src = gen_reg_rtx (imode);
1621 if (imode != BLKmode)
1622 src = gen_lowpart (GET_MODE (orig_src), src);
1623 emit_move_insn (src, orig_src);
1624 /* ...and back again. */
1625 if (imode != BLKmode)
1626 src = gen_lowpart (imode, src);
1627 emit_group_load_1 (tmps, dst, src, type, ssize);
1631 /* Check for a NULL entry, used to indicate that the parameter goes
1632 both on the stack and in registers. */
1633 if (XEXP (XVECEXP (dst, 0, 0), 0))
1638 /* Process the pieces. */
1639 for (i = start; i < XVECLEN (dst, 0); i++)
1641 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1642 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1643 unsigned int bytelen = GET_MODE_SIZE (mode);
1646 /* Handle trailing fragments that run over the size of the struct. */
1647 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1649 /* Arrange to shift the fragment to where it belongs.
1650 extract_bit_field loads to the lsb of the reg. */
1652 #ifdef BLOCK_REG_PADDING
1653 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1654 == (BYTES_BIG_ENDIAN ? upward : downward)
1659 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1660 bytelen = ssize - bytepos;
1661 gcc_assert (bytelen > 0);
1664 /* If we won't be loading directly from memory, protect the real source
1665 from strange tricks we might play; but make sure that the source can
1666 be loaded directly into the destination. */
1668 if (!MEM_P (orig_src)
1669 && (!CONSTANT_P (orig_src)
1670 || (GET_MODE (orig_src) != mode
1671 && GET_MODE (orig_src) != VOIDmode)))
1673 if (GET_MODE (orig_src) == VOIDmode)
1674 src = gen_reg_rtx (mode);
1676 src = gen_reg_rtx (GET_MODE (orig_src));
1678 emit_move_insn (src, orig_src);
1681 /* Optimize the access just a bit. */
1683 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1684 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1685 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1686 && bytelen == GET_MODE_SIZE (mode))
1688 tmps[i] = gen_reg_rtx (mode);
1689 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1691 else if (COMPLEX_MODE_P (mode)
1692 && GET_MODE (src) == mode
1693 && bytelen == GET_MODE_SIZE (mode))
1694 /* Let emit_move_complex do the bulk of the work. */
1696 else if (GET_CODE (src) == CONCAT)
1698 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1699 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1701 if ((bytepos == 0 && bytelen == slen0)
1702 || (bytepos != 0 && bytepos + bytelen <= slen))
1704 /* The following assumes that the concatenated objects all
1705 have the same size. In this case, a simple calculation
1706 can be used to determine the object and the bit field
1708 tmps[i] = XEXP (src, bytepos / slen0);
1709 if (! CONSTANT_P (tmps[i])
1710 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1711 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1712 (bytepos % slen0) * BITS_PER_UNIT,
1713 1, false, NULL_RTX, mode, mode);
1719 gcc_assert (!bytepos);
1720 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1721 emit_move_insn (mem, src);
1722 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1723 0, 1, false, NULL_RTX, mode, mode);
1726 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1727 SIMD register, which is currently broken. While we get GCC
1728 to emit proper RTL for these cases, let's dump to memory. */
1729 else if (VECTOR_MODE_P (GET_MODE (dst))
1732 int slen = GET_MODE_SIZE (GET_MODE (src));
1735 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1736 emit_move_insn (mem, src);
1737 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1739 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1740 && XVECLEN (dst, 0) > 1)
1741 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1742 else if (CONSTANT_P (src))
1744 HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1752 gcc_assert (2 * len == ssize);
1753 split_double (src, &first, &second);
1760 else if (REG_P (src) && GET_MODE (src) == mode)
1763 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1764 bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1768 tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1773 /* Emit code to move a block SRC of type TYPE to a block DST,
1774 where DST is non-consecutive registers represented by a PARALLEL.
1775 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1779 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1784 tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1785 emit_group_load_1 (tmps, dst, src, type, ssize);
1787 /* Copy the extracted pieces into the proper (probable) hard regs. */
1788 for (i = 0; i < XVECLEN (dst, 0); i++)
1790 rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1793 emit_move_insn (d, tmps[i]);
1797 /* Similar, but load SRC into new pseudos in a format that looks like
1798 PARALLEL. This can later be fed to emit_group_move to get things
1799 in the right place. */
1802 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1807 vec = rtvec_alloc (XVECLEN (parallel, 0));
1808 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1810 /* Convert the vector to look just like the original PARALLEL, except
1811 with the computed values. */
1812 for (i = 0; i < XVECLEN (parallel, 0); i++)
1814 rtx e = XVECEXP (parallel, 0, i);
1815 rtx d = XEXP (e, 0);
1819 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1820 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1822 RTVEC_ELT (vec, i) = e;
1825 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1828 /* Emit code to move a block SRC to block DST, where SRC and DST are
1829 non-consecutive groups of registers, each represented by a PARALLEL. */
1832 emit_group_move (rtx dst, rtx src)
1836 gcc_assert (GET_CODE (src) == PARALLEL
1837 && GET_CODE (dst) == PARALLEL
1838 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1840 /* Skip first entry if NULL. */
1841 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1842 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1843 XEXP (XVECEXP (src, 0, i), 0));
1846 /* Move a group of registers represented by a PARALLEL into pseudos. */
1849 emit_group_move_into_temps (rtx src)
1851 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1854 for (i = 0; i < XVECLEN (src, 0); i++)
1856 rtx e = XVECEXP (src, 0, i);
1857 rtx d = XEXP (e, 0);
1860 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1861 RTVEC_ELT (vec, i) = e;
1864 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1867 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1868 where SRC is non-consecutive registers represented by a PARALLEL.
1869 SSIZE represents the total size of block ORIG_DST, or -1 if not
1873 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1876 int start, finish, i;
1877 enum machine_mode m = GET_MODE (orig_dst);
1879 gcc_assert (GET_CODE (src) == PARALLEL);
1881 if (!SCALAR_INT_MODE_P (m)
1882 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1884 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1885 if (imode == BLKmode)
1886 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1888 dst = gen_reg_rtx (imode);
1889 emit_group_store (dst, src, type, ssize);
1890 if (imode != BLKmode)
1891 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1892 emit_move_insn (orig_dst, dst);
1896 /* Check for a NULL entry, used to indicate that the parameter goes
1897 both on the stack and in registers. */
1898 if (XEXP (XVECEXP (src, 0, 0), 0))
1902 finish = XVECLEN (src, 0);
1904 tmps = XALLOCAVEC (rtx, finish);
1906 /* Copy the (probable) hard regs into pseudos. */
1907 for (i = start; i < finish; i++)
1909 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1910 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1912 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1913 emit_move_insn (tmps[i], reg);
1919 /* If we won't be storing directly into memory, protect the real destination
1920 from strange tricks we might play. */
1922 if (GET_CODE (dst) == PARALLEL)
1926 /* We can get a PARALLEL dst if there is a conditional expression in
1927 a return statement. In that case, the dst and src are the same,
1928 so no action is necessary. */
1929 if (rtx_equal_p (dst, src))
1932 /* It is unclear if we can ever reach here, but we may as well handle
1933 it. Allocate a temporary, and split this into a store/load to/from
1936 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1937 emit_group_store (temp, src, type, ssize);
1938 emit_group_load (dst, temp, type, ssize);
1941 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1943 enum machine_mode outer = GET_MODE (dst);
1944 enum machine_mode inner;
1945 HOST_WIDE_INT bytepos;
1949 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1950 dst = gen_reg_rtx (outer);
1952 /* Make life a bit easier for combine. */
1953 /* If the first element of the vector is the low part
1954 of the destination mode, use a paradoxical subreg to
1955 initialize the destination. */
1958 inner = GET_MODE (tmps[start]);
1959 bytepos = subreg_lowpart_offset (inner, outer);
1960 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1962 temp = simplify_gen_subreg (outer, tmps[start],
1966 emit_move_insn (dst, temp);
1973 /* If the first element wasn't the low part, try the last. */
1975 && start < finish - 1)
1977 inner = GET_MODE (tmps[finish - 1]);
1978 bytepos = subreg_lowpart_offset (inner, outer);
1979 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1981 temp = simplify_gen_subreg (outer, tmps[finish - 1],
1985 emit_move_insn (dst, temp);
1992 /* Otherwise, simply initialize the result to zero. */
1994 emit_move_insn (dst, CONST0_RTX (outer));
1997 /* Process the pieces. */
1998 for (i = start; i < finish; i++)
2000 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2001 enum machine_mode mode = GET_MODE (tmps[i]);
2002 unsigned int bytelen = GET_MODE_SIZE (mode);
2003 unsigned int adj_bytelen = bytelen;
2006 /* Handle trailing fragments that run over the size of the struct. */
2007 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2008 adj_bytelen = ssize - bytepos;
2010 if (GET_CODE (dst) == CONCAT)
2012 if (bytepos + adj_bytelen
2013 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2014 dest = XEXP (dst, 0);
2015 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2017 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2018 dest = XEXP (dst, 1);
2022 enum machine_mode dest_mode = GET_MODE (dest);
2023 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2025 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2027 if (GET_MODE_ALIGNMENT (dest_mode)
2028 >= GET_MODE_ALIGNMENT (tmp_mode))
2030 dest = assign_stack_temp (dest_mode,
2031 GET_MODE_SIZE (dest_mode),
2033 emit_move_insn (adjust_address (dest,
2041 dest = assign_stack_temp (tmp_mode,
2042 GET_MODE_SIZE (tmp_mode),
2044 emit_move_insn (dest, tmps[i]);
2045 dst = adjust_address (dest, dest_mode, bytepos);
2051 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2053 /* store_bit_field always takes its value from the lsb.
2054 Move the fragment to the lsb if it's not already there. */
2056 #ifdef BLOCK_REG_PADDING
2057 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2058 == (BYTES_BIG_ENDIAN ? upward : downward)
2064 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2065 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2068 bytelen = adj_bytelen;
2071 /* Optimize the access just a bit. */
2073 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2074 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2075 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2076 && bytelen == GET_MODE_SIZE (mode))
2077 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2079 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (orig_dst != dst)
2085 emit_move_insn (orig_dst, dst);
2088 /* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2092 The purpose of this routine is to handle functions that return
2093 BLKmode structures in registers. Some machines (the PA for example)
2094 want to return all small structures in registers regardless of the
2095 structure's alignment. */
2098 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2100 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2101 rtx src = NULL, dst = NULL;
2102 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2103 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2104 enum machine_mode copy_mode;
2108 tgtblk = assign_temp (build_qualified_type (type,
2110 | TYPE_QUAL_CONST)),
2112 preserve_temp_slots (tgtblk);
2115 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2116 into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2122 /* If the structure doesn't take up a whole number of words, see whether
2123 SRCREG is padded on the left or on the right. If it's on the left,
2124 set PADDING_CORRECTION to the number of bits to skip.
2126 In most ABIs, the structure will be returned at the least end of
2127 the register, which translates to right padding on little-endian
2128 targets and left padding on big-endian targets. The opposite
2129 holds if the structure is returned at the most significant
2130 end of the register. */
2131 if (bytes % UNITS_PER_WORD != 0
2132 && (targetm.calls.return_in_msb (type)
2134 : BYTES_BIG_ENDIAN))
2136 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2138 /* Copy the structure BITSIZE bits at a time. If the target lives in
2139 memory, take care of not reading/writing past its end by selecting
2140 a copy mode suited to BITSIZE. This should always be possible given
2143 We could probably emit more efficient code for machines which do not use
2144 strict alignment, but it doesn't seem worth the effort at the current
2147 copy_mode = word_mode;
2150 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2151 if (mem_mode != BLKmode)
2152 copy_mode = mem_mode;
2155 for (bitpos = 0, xbitpos = padding_correction;
2156 bitpos < bytes * BITS_PER_UNIT;
2157 bitpos += bitsize, xbitpos += bitsize)
2159 /* We need a new source operand each time xbitpos is on a
2160 word boundary and when xbitpos == padding_correction
2161 (the first time through). */
2162 if (xbitpos % BITS_PER_WORD == 0
2163 || xbitpos == padding_correction)
2164 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2167 /* We need a new destination operand each time bitpos is on
2169 if (bitpos % BITS_PER_WORD == 0)
2170 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2172 /* Use xbitpos for the source extraction (right justified) and
2173 bitpos for the destination store (left justified). */
2174 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2175 extract_bit_field (src, bitsize,
2176 xbitpos % BITS_PER_WORD, 1, false,
2177 NULL_RTX, copy_mode, copy_mode));
2183 /* Add a USE expression for REG to the (possibly empty) list pointed
2184 to by CALL_FUSAGE. REG must denote a hard register. */
2187 use_reg (rtx *call_fusage, rtx reg)
2189 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2192 = gen_rtx_EXPR_LIST (VOIDmode,
2193 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2196 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2197 starting at REGNO. All of these registers must be hard registers. */
2200 use_regs (rtx *call_fusage, int regno, int nregs)
2204 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2206 for (i = 0; i < nregs; i++)
2207 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2210 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2211 PARALLEL REGS. This is for calls that pass values in multiple
2212 non-contiguous locations. The Irix 6 ABI has examples of this. */
2215 use_group_regs (rtx *call_fusage, rtx regs)
2219 for (i = 0; i < XVECLEN (regs, 0); i++)
2221 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2223 /* A NULL entry means the parameter goes both on the stack and in
2224 registers. This can also be a MEM for targets that pass values
2225 partially on the stack and partially in registers. */
2226 if (reg != 0 && REG_P (reg))
2227 use_reg (call_fusage, reg);
2231 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2232 assigment and the code of the expresion on the RHS is CODE. Return
2236 get_def_for_expr (tree name, enum tree_code code)
2240 if (TREE_CODE (name) != SSA_NAME)
2243 def_stmt = get_gimple_for_ssa_name (name);
2245 || gimple_assign_rhs_code (def_stmt) != code)
2252 /* Determine whether the LEN bytes generated by CONSTFUN can be
2253 stored to memory using several move instructions. CONSTFUNDATA is
2254 a pointer which will be passed as argument in every CONSTFUN call.
2255 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2256 a memset operation and false if it's a copy of a constant string.
2257 Return nonzero if a call to store_by_pieces should succeed. */
2260 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2261 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2262 void *constfundata, unsigned int align, bool memsetp)
2264 unsigned HOST_WIDE_INT l;
2265 unsigned int max_size;
2266 HOST_WIDE_INT offset = 0;
2267 enum machine_mode mode;
2268 enum insn_code icode;
2270 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2271 rtx cst ATTRIBUTE_UNUSED;
2277 ? SET_BY_PIECES_P (len, align)
2278 : STORE_BY_PIECES_P (len, align)))
2281 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2283 /* We would first store what we can in the largest integer mode, then go to
2284 successively smaller modes. */
2287 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2291 max_size = STORE_MAX_PIECES + 1;
2292 while (max_size > 1)
2294 mode = widest_int_mode_for_size (max_size);
2296 if (mode == VOIDmode)
2299 icode = optab_handler (mov_optab, mode);
2300 if (icode != CODE_FOR_nothing
2301 && align >= GET_MODE_ALIGNMENT (mode))
2303 unsigned int size = GET_MODE_SIZE (mode);
2310 cst = (*constfun) (constfundata, offset, mode);
2311 if (!targetm.legitimate_constant_p (mode, cst))
2321 max_size = GET_MODE_SIZE (mode);
2324 /* The code above should have handled everything. */
2331 /* Generate several move instructions to store LEN bytes generated by
2332 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2333 pointer which will be passed as argument in every CONSTFUN call.
2334 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2335 a memset operation and false if it's a copy of a constant string.
2336 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2337 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2341 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2342 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2343 void *constfundata, unsigned int align, bool memsetp, int endp)
2345 enum machine_mode to_addr_mode
2346 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2347 struct store_by_pieces_d data;
2351 gcc_assert (endp != 2);
2356 ? SET_BY_PIECES_P (len, align)
2357 : STORE_BY_PIECES_P (len, align));
2358 data.constfun = constfun;
2359 data.constfundata = constfundata;
2362 store_by_pieces_1 (&data, align);
2367 gcc_assert (!data.reverse);
2372 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2373 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2375 data.to_addr = copy_to_mode_reg (to_addr_mode,
2376 plus_constant (data.to_addr,
2379 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2386 to1 = adjust_address (data.to, QImode, data.offset);
2394 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2395 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2398 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2400 struct store_by_pieces_d data;
2405 data.constfun = clear_by_pieces_1;
2406 data.constfundata = NULL;
2409 store_by_pieces_1 (&data, align);
2412 /* Callback routine for clear_by_pieces.
2413 Return const0_rtx unconditionally. */
2416 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2417 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2418 enum machine_mode mode ATTRIBUTE_UNUSED)
2423 /* Subroutine of clear_by_pieces and store_by_pieces.
2424 Generate several move instructions to store LEN bytes of block TO. (A MEM
2425 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2428 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2429 unsigned int align ATTRIBUTE_UNUSED)
2431 enum machine_mode to_addr_mode
2432 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2433 rtx to_addr = XEXP (data->to, 0);
2434 unsigned int max_size = STORE_MAX_PIECES + 1;
2435 enum insn_code icode;
2438 data->to_addr = to_addr;
2440 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2441 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2443 data->explicit_inc_to = 0;
2445 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2447 data->offset = data->len;
2449 /* If storing requires more than two move insns,
2450 copy addresses to registers (to make displacements shorter)
2451 and use post-increment if available. */
2452 if (!data->autinc_to
2453 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2455 /* Determine the main mode we'll be using.
2456 MODE might not be used depending on the definitions of the
2457 USE_* macros below. */
2458 enum machine_mode mode ATTRIBUTE_UNUSED
2459 = widest_int_mode_for_size (max_size);
2461 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2463 data->to_addr = copy_to_mode_reg (to_addr_mode,
2464 plus_constant (to_addr, data->len));
2465 data->autinc_to = 1;
2466 data->explicit_inc_to = -1;
2469 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2470 && ! data->autinc_to)
2472 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2473 data->autinc_to = 1;
2474 data->explicit_inc_to = 1;
2477 if ( !data->autinc_to && CONSTANT_P (to_addr))
2478 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2481 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2483 /* First store what we can in the largest integer mode, then go to
2484 successively smaller modes. */
2486 while (max_size > 1)
2488 enum machine_mode mode = widest_int_mode_for_size (max_size);
2490 if (mode == VOIDmode)
2493 icode = optab_handler (mov_optab, mode);
2494 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2495 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2497 max_size = GET_MODE_SIZE (mode);
2500 /* The code above should have handled everything. */
2501 gcc_assert (!data->len);
2504 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2505 with move instructions for mode MODE. GENFUN is the gen_... function
2506 to make a move insn for that mode. DATA has all the other info. */
2509 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2510 struct store_by_pieces_d *data)
2512 unsigned int size = GET_MODE_SIZE (mode);
2515 while (data->len >= size)
2518 data->offset -= size;
2520 if (data->autinc_to)
2521 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2524 to1 = adjust_address (data->to, mode, data->offset);
2526 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2527 emit_insn (gen_add2_insn (data->to_addr,
2528 GEN_INT (-(HOST_WIDE_INT) size)));
2530 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2531 emit_insn ((*genfun) (to1, cst));
2533 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2534 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2536 if (! data->reverse)
2537 data->offset += size;
2543 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2544 its length in bytes. */
2547 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2548 unsigned int expected_align, HOST_WIDE_INT expected_size)
2550 enum machine_mode mode = GET_MODE (object);
2553 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2555 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2556 just move a zero. Otherwise, do this a piece at a time. */
2558 && CONST_INT_P (size)
2559 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2561 rtx zero = CONST0_RTX (mode);
2564 emit_move_insn (object, zero);
2568 if (COMPLEX_MODE_P (mode))
2570 zero = CONST0_RTX (GET_MODE_INNER (mode));
2573 write_complex_part (object, zero, 0);
2574 write_complex_part (object, zero, 1);
2580 if (size == const0_rtx)
2583 align = MEM_ALIGN (object);
2585 if (CONST_INT_P (size)
2586 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2587 clear_by_pieces (object, INTVAL (size), align);
2588 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2589 expected_align, expected_size))
2591 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2592 return set_storage_via_libcall (object, size, const0_rtx,
2593 method == BLOCK_OP_TAILCALL);
2601 clear_storage (rtx object, rtx size, enum block_op_methods method)
2603 return clear_storage_hints (object, size, method, 0, -1);
2607 /* A subroutine of clear_storage. Expand a call to memset.
2608 Return the return value of memset, 0 otherwise. */
2611 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2613 tree call_expr, fn, object_tree, size_tree, val_tree;
2614 enum machine_mode size_mode;
2617 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2618 place those into new pseudos into a VAR_DECL and use them later. */
2620 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2622 size_mode = TYPE_MODE (sizetype);
2623 size = convert_to_mode (size_mode, size, 1);
2624 size = copy_to_mode_reg (size_mode, size);
2626 /* It is incorrect to use the libcall calling conventions to call
2627 memset in this context. This could be a user call to memset and
2628 the user may wish to examine the return value from memset. For
2629 targets where libcalls and normal calls have different conventions
2630 for returning pointers, we could end up generating incorrect code. */
2632 object_tree = make_tree (ptr_type_node, object);
2633 if (!CONST_INT_P (val))
2634 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2635 size_tree = make_tree (sizetype, size);
2636 val_tree = make_tree (integer_type_node, val);
2638 fn = clear_storage_libcall_fn (true);
2639 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2640 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2642 retval = expand_normal (call_expr);
2647 /* A subroutine of set_storage_via_libcall. Create the tree node
2648 for the function we use for block clears. The first time FOR_CALL
2649 is true, we call assemble_external. */
2651 tree block_clear_fn;
2654 init_block_clear_fn (const char *asmspec)
2656 if (!block_clear_fn)
2660 fn = get_identifier ("memset");
2661 args = build_function_type_list (ptr_type_node, ptr_type_node,
2662 integer_type_node, sizetype,
2665 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2666 DECL_EXTERNAL (fn) = 1;
2667 TREE_PUBLIC (fn) = 1;
2668 DECL_ARTIFICIAL (fn) = 1;
2669 TREE_NOTHROW (fn) = 1;
2670 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2671 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2673 block_clear_fn = fn;
2677 set_user_assembler_name (block_clear_fn, asmspec);
2681 clear_storage_libcall_fn (int for_call)
2683 static bool emitted_extern;
2685 if (!block_clear_fn)
2686 init_block_clear_fn (NULL);
2688 if (for_call && !emitted_extern)
2690 emitted_extern = true;
2691 make_decl_rtl (block_clear_fn);
2692 assemble_external (block_clear_fn);
2695 return block_clear_fn;
2698 /* Expand a setmem pattern; return true if successful. */
2701 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2702 unsigned int expected_align, HOST_WIDE_INT expected_size)
2704 /* Try the most limited insn first, because there's no point
2705 including more than one in the machine description unless
2706 the more limited one has some advantage. */
2708 enum machine_mode mode;
2710 if (expected_align < align)
2711 expected_align = align;
2713 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2714 mode = GET_MODE_WIDER_MODE (mode))
2716 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2718 if (code != CODE_FOR_nothing
2719 /* We don't need MODE to be narrower than
2720 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2721 the mode mask, as it is returned by the macro, it will
2722 definitely be less than the actual mode mask. */
2723 && ((CONST_INT_P (size)
2724 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2725 <= (GET_MODE_MASK (mode) >> 1)))
2726 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2728 struct expand_operand ops[6];
2731 nops = insn_data[(int) code].n_generator_args;
2732 gcc_assert (nops == 4 || nops == 6);
2734 create_fixed_operand (&ops[0], object);
2735 /* The check above guarantees that this size conversion is valid. */
2736 create_convert_operand_to (&ops[1], size, mode, true);
2737 create_convert_operand_from (&ops[2], val, byte_mode, true);
2738 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2741 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2742 create_integer_operand (&ops[5], expected_size);
2744 if (maybe_expand_insn (code, nops, ops))
2753 /* Write to one of the components of the complex value CPLX. Write VAL to
2754 the real part if IMAG_P is false, and the imaginary part if its true. */
2757 write_complex_part (rtx cplx, rtx val, bool imag_p)
2759 enum machine_mode cmode;
2760 enum machine_mode imode;
2763 if (GET_CODE (cplx) == CONCAT)
2765 emit_move_insn (XEXP (cplx, imag_p), val);
2769 cmode = GET_MODE (cplx);
2770 imode = GET_MODE_INNER (cmode);
2771 ibitsize = GET_MODE_BITSIZE (imode);
2773 /* For MEMs simplify_gen_subreg may generate an invalid new address
2774 because, e.g., the original address is considered mode-dependent
2775 by the target, which restricts simplify_subreg from invoking
2776 adjust_address_nv. Instead of preparing fallback support for an
2777 invalid address, we call adjust_address_nv directly. */
2780 emit_move_insn (adjust_address_nv (cplx, imode,
2781 imag_p ? GET_MODE_SIZE (imode) : 0),
2786 /* If the sub-object is at least word sized, then we know that subregging
2787 will work. This special case is important, since store_bit_field
2788 wants to operate on integer modes, and there's rarely an OImode to
2789 correspond to TCmode. */
2790 if (ibitsize >= BITS_PER_WORD
2791 /* For hard regs we have exact predicates. Assume we can split
2792 the original object if it spans an even number of hard regs.
2793 This special case is important for SCmode on 64-bit platforms
2794 where the natural size of floating-point regs is 32-bit. */
2796 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2797 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2799 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2800 imag_p ? GET_MODE_SIZE (imode) : 0);
2803 emit_move_insn (part, val);
2807 /* simplify_gen_subreg may fail for sub-word MEMs. */
2808 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2811 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2814 /* Extract one of the components of the complex value CPLX. Extract the
2815 real part if IMAG_P is false, and the imaginary part if it's true. */
2818 read_complex_part (rtx cplx, bool imag_p)
2820 enum machine_mode cmode, imode;
2823 if (GET_CODE (cplx) == CONCAT)
2824 return XEXP (cplx, imag_p);
2826 cmode = GET_MODE (cplx);
2827 imode = GET_MODE_INNER (cmode);
2828 ibitsize = GET_MODE_BITSIZE (imode);
2830 /* Special case reads from complex constants that got spilled to memory. */
2831 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2833 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2834 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2836 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2837 if (CONSTANT_CLASS_P (part))
2838 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2842 /* For MEMs simplify_gen_subreg may generate an invalid new address
2843 because, e.g., the original address is considered mode-dependent
2844 by the target, which restricts simplify_subreg from invoking
2845 adjust_address_nv. Instead of preparing fallback support for an
2846 invalid address, we call adjust_address_nv directly. */
2848 return adjust_address_nv (cplx, imode,
2849 imag_p ? GET_MODE_SIZE (imode) : 0);
2851 /* If the sub-object is at least word sized, then we know that subregging
2852 will work. This special case is important, since extract_bit_field
2853 wants to operate on integer modes, and there's rarely an OImode to
2854 correspond to TCmode. */
2855 if (ibitsize >= BITS_PER_WORD
2856 /* For hard regs we have exact predicates. Assume we can split
2857 the original object if it spans an even number of hard regs.
2858 This special case is important for SCmode on 64-bit platforms
2859 where the natural size of floating-point regs is 32-bit. */
2861 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2862 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2864 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2865 imag_p ? GET_MODE_SIZE (imode) : 0);
2869 /* simplify_gen_subreg may fail for sub-word MEMs. */
2870 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2873 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2874 true, false, NULL_RTX, imode, imode);
2877 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2878 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2879 represented in NEW_MODE. If FORCE is true, this will never happen, as
2880 we'll force-create a SUBREG if needed. */
2883 emit_move_change_mode (enum machine_mode new_mode,
2884 enum machine_mode old_mode, rtx x, bool force)
2888 if (push_operand (x, GET_MODE (x)))
2890 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2891 MEM_COPY_ATTRIBUTES (ret, x);
2895 /* We don't have to worry about changing the address since the
2896 size in bytes is supposed to be the same. */
2897 if (reload_in_progress)
2899 /* Copy the MEM to change the mode and move any
2900 substitutions from the old MEM to the new one. */
2901 ret = adjust_address_nv (x, new_mode, 0);
2902 copy_replacements (x, ret);
2905 ret = adjust_address (x, new_mode, 0);
2909 /* Note that we do want simplify_subreg's behavior of validating
2910 that the new mode is ok for a hard register. If we were to use
2911 simplify_gen_subreg, we would create the subreg, but would
2912 probably run into the target not being able to implement it. */
2913 /* Except, of course, when FORCE is true, when this is exactly what
2914 we want. Which is needed for CCmodes on some targets. */
2916 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2918 ret = simplify_subreg (new_mode, x, old_mode, 0);
2924 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2925 an integer mode of the same size as MODE. Returns the instruction
2926 emitted, or NULL if such a move could not be generated. */
2929 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2931 enum machine_mode imode;
2932 enum insn_code code;
2934 /* There must exist a mode of the exact size we require. */
2935 imode = int_mode_for_mode (mode);
2936 if (imode == BLKmode)
2939 /* The target must support moves in this mode. */
2940 code = optab_handler (mov_optab, imode);
2941 if (code == CODE_FOR_nothing)
2944 x = emit_move_change_mode (imode, mode, x, force);
2947 y = emit_move_change_mode (imode, mode, y, force);
2950 return emit_insn (GEN_FCN (code) (x, y));
2953 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2954 Return an equivalent MEM that does not use an auto-increment. */
2957 emit_move_resolve_push (enum machine_mode mode, rtx x)
2959 enum rtx_code code = GET_CODE (XEXP (x, 0));
2960 HOST_WIDE_INT adjust;
2963 adjust = GET_MODE_SIZE (mode);
2964 #ifdef PUSH_ROUNDING
2965 adjust = PUSH_ROUNDING (adjust);
2967 if (code == PRE_DEC || code == POST_DEC)
2969 else if (code == PRE_MODIFY || code == POST_MODIFY)
2971 rtx expr = XEXP (XEXP (x, 0), 1);
2974 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2975 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
2976 val = INTVAL (XEXP (expr, 1));
2977 if (GET_CODE (expr) == MINUS)
2979 gcc_assert (adjust == val || adjust == -val);
2983 /* Do not use anti_adjust_stack, since we don't want to update
2984 stack_pointer_delta. */
2985 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2986 GEN_INT (adjust), stack_pointer_rtx,
2987 0, OPTAB_LIB_WIDEN);
2988 if (temp != stack_pointer_rtx)
2989 emit_move_insn (stack_pointer_rtx, temp);
2996 temp = stack_pointer_rtx;
3001 temp = plus_constant (stack_pointer_rtx, -adjust);
3007 return replace_equiv_address (x, temp);
3010 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3011 X is known to satisfy push_operand, and MODE is known to be complex.
3012 Returns the last instruction emitted. */
3015 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3017 enum machine_mode submode = GET_MODE_INNER (mode);
3020 #ifdef PUSH_ROUNDING
3021 unsigned int submodesize = GET_MODE_SIZE (submode);
3023 /* In case we output to the stack, but the size is smaller than the
3024 machine can push exactly, we need to use move instructions. */
3025 if (PUSH_ROUNDING (submodesize) != submodesize)
3027 x = emit_move_resolve_push (mode, x);
3028 return emit_move_insn (x, y);
3032 /* Note that the real part always precedes the imag part in memory
3033 regardless of machine's endianness. */
3034 switch (GET_CODE (XEXP (x, 0)))
3048 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3049 read_complex_part (y, imag_first));
3050 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3051 read_complex_part (y, !imag_first));
3054 /* A subroutine of emit_move_complex. Perform the move from Y to X
3055 via two moves of the parts. Returns the last instruction emitted. */
3058 emit_move_complex_parts (rtx x, rtx y)
3060 /* Show the output dies here. This is necessary for SUBREGs
3061 of pseudos since we cannot track their lifetimes correctly;
3062 hard regs shouldn't appear here except as return values. */
3063 if (!reload_completed && !reload_in_progress
3064 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3067 write_complex_part (x, read_complex_part (y, false), false);
3068 write_complex_part (x, read_complex_part (y, true), true);
3070 return get_last_insn ();
3073 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3074 MODE is known to be complex. Returns the last instruction emitted. */
3077 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3081 /* Need to take special care for pushes, to maintain proper ordering
3082 of the data, and possibly extra padding. */
3083 if (push_operand (x, mode))
3084 return emit_move_complex_push (mode, x, y);
3086 /* See if we can coerce the target into moving both values at once. */
3088 /* Move floating point as parts. */
3089 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3090 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3092 /* Not possible if the values are inherently not adjacent. */
3093 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3095 /* Is possible if both are registers (or subregs of registers). */
3096 else if (register_operand (x, mode) && register_operand (y, mode))
3098 /* If one of the operands is a memory, and alignment constraints
3099 are friendly enough, we may be able to do combined memory operations.
3100 We do not attempt this if Y is a constant because that combination is
3101 usually better with the by-parts thing below. */
3102 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3103 && (!STRICT_ALIGNMENT
3104 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3113 /* For memory to memory moves, optimal behavior can be had with the
3114 existing block move logic. */
3115 if (MEM_P (x) && MEM_P (y))
3117 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3118 BLOCK_OP_NO_LIBCALL);
3119 return get_last_insn ();
3122 ret = emit_move_via_integer (mode, x, y, true);
3127 return emit_move_complex_parts (x, y);
3130 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3131 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3134 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3138 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3141 enum insn_code code = optab_handler (mov_optab, CCmode);
3142 if (code != CODE_FOR_nothing)
3144 x = emit_move_change_mode (CCmode, mode, x, true);
3145 y = emit_move_change_mode (CCmode, mode, y, true);
3146 return emit_insn (GEN_FCN (code) (x, y));
3150 /* Otherwise, find the MODE_INT mode of the same width. */
3151 ret = emit_move_via_integer (mode, x, y, false);
3152 gcc_assert (ret != NULL);
3156 /* Return true if word I of OP lies entirely in the
3157 undefined bits of a paradoxical subreg. */
3160 undefined_operand_subword_p (const_rtx op, int i)
3162 enum machine_mode innermode, innermostmode;
3164 if (GET_CODE (op) != SUBREG)
3166 innermode = GET_MODE (op);
3167 innermostmode = GET_MODE (SUBREG_REG (op));
3168 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3169 /* The SUBREG_BYTE represents offset, as if the value were stored in
3170 memory, except for a paradoxical subreg where we define
3171 SUBREG_BYTE to be 0; undo this exception as in
3173 if (SUBREG_BYTE (op) == 0
3174 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3176 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3177 if (WORDS_BIG_ENDIAN)
3178 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3179 if (BYTES_BIG_ENDIAN)
3180 offset += difference % UNITS_PER_WORD;
3182 if (offset >= GET_MODE_SIZE (innermostmode)
3183 || offset <= -GET_MODE_SIZE (word_mode))
3188 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3189 MODE is any multi-word or full-word mode that lacks a move_insn
3190 pattern. Note that you will get better code if you define such
3191 patterns, even if they must turn into multiple assembler instructions. */
3194 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3201 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3203 /* If X is a push on the stack, do the push now and replace
3204 X with a reference to the stack pointer. */
3205 if (push_operand (x, mode))
3206 x = emit_move_resolve_push (mode, x);
3208 /* If we are in reload, see if either operand is a MEM whose address
3209 is scheduled for replacement. */
3210 if (reload_in_progress && MEM_P (x)
3211 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3212 x = replace_equiv_address_nv (x, inner);
3213 if (reload_in_progress && MEM_P (y)
3214 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3215 y = replace_equiv_address_nv (y, inner);
3219 need_clobber = false;
3221 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3224 rtx xpart = operand_subword (x, i, 1, mode);
3227 /* Do not generate code for a move if it would come entirely
3228 from the undefined bits of a paradoxical subreg. */
3229 if (undefined_operand_subword_p (y, i))
3232 ypart = operand_subword (y, i, 1, mode);
3234 /* If we can't get a part of Y, put Y into memory if it is a
3235 constant. Otherwise, force it into a register. Then we must
3236 be able to get a part of Y. */
3237 if (ypart == 0 && CONSTANT_P (y))
3239 y = use_anchored_address (force_const_mem (mode, y));
3240 ypart = operand_subword (y, i, 1, mode);
3242 else if (ypart == 0)
3243 ypart = operand_subword_force (y, i, mode);
3245 gcc_assert (xpart && ypart);
3247 need_clobber |= (GET_CODE (xpart) == SUBREG);
3249 last_insn = emit_move_insn (xpart, ypart);
3255 /* Show the output dies here. This is necessary for SUBREGs
3256 of pseudos since we cannot track their lifetimes correctly;
3257 hard regs shouldn't appear here except as return values.
3258 We never want to emit such a clobber after reload. */
3260 && ! (reload_in_progress || reload_completed)
3261 && need_clobber != 0)
3269 /* Low level part of emit_move_insn.
3270 Called just like emit_move_insn, but assumes X and Y
3271 are basically valid. */
3274 emit_move_insn_1 (rtx x, rtx y)
3276 enum machine_mode mode = GET_MODE (x);
3277 enum insn_code code;
3279 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3281 code = optab_handler (mov_optab, mode);
3282 if (code != CODE_FOR_nothing)
3283 return emit_insn (GEN_FCN (code) (x, y));
3285 /* Expand complex moves by moving real part and imag part. */
3286 if (COMPLEX_MODE_P (mode))
3287 return emit_move_complex (mode, x, y);
3289 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3290 || ALL_FIXED_POINT_MODE_P (mode))
3292 rtx result = emit_move_via_integer (mode, x, y, true);
3294 /* If we can't find an integer mode, use multi words. */
3298 return emit_move_multi_word (mode, x, y);
3301 if (GET_MODE_CLASS (mode) == MODE_CC)
3302 return emit_move_ccmode (mode, x, y);
3304 /* Try using a move pattern for the corresponding integer mode. This is
3305 only safe when simplify_subreg can convert MODE constants into integer
3306 constants. At present, it can only do this reliably if the value
3307 fits within a HOST_WIDE_INT. */
3308 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3310 rtx ret = emit_move_via_integer (mode, x, y, false);
3315 return emit_move_multi_word (mode, x, y);
3318 /* Generate code to copy Y into X.
3319 Both Y and X must have the same mode, except that
3320 Y can be a constant with VOIDmode.
3321 This mode cannot be BLKmode; use emit_block_move for that.
3323 Return the last instruction emitted. */
3326 emit_move_insn (rtx x, rtx y)
3328 enum machine_mode mode = GET_MODE (x);
3329 rtx y_cst = NULL_RTX;
3332 gcc_assert (mode != BLKmode
3333 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3338 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3339 && (last_insn = compress_float_constant (x, y)))
3344 if (!targetm.legitimate_constant_p (mode, y))
3346 y = force_const_mem (mode, y);
3348 /* If the target's cannot_force_const_mem prevented the spill,
3349 assume that the target's move expanders will also take care
3350 of the non-legitimate constant. */
3354 y = use_anchored_address (y);
3358 /* If X or Y are memory references, verify that their addresses are valid
3361 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3363 && ! push_operand (x, GET_MODE (x))))
3364 x = validize_mem (x);
3367 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3368 MEM_ADDR_SPACE (y)))
3369 y = validize_mem (y);
3371 gcc_assert (mode != BLKmode);
3373 last_insn = emit_move_insn_1 (x, y);
3375 if (y_cst && REG_P (x)
3376 && (set = single_set (last_insn)) != NULL_RTX
3377 && SET_DEST (set) == x
3378 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3379 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3384 /* If Y is representable exactly in a narrower mode, and the target can
3385 perform the extension directly from constant or memory, then emit the
3386 move as an extension. */
3389 compress_float_constant (rtx x, rtx y)
3391 enum machine_mode dstmode = GET_MODE (x);
3392 enum machine_mode orig_srcmode = GET_MODE (y);
3393 enum machine_mode srcmode;
3395 int oldcost, newcost;
3396 bool speed = optimize_insn_for_speed_p ();
3398 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3400 if (targetm.legitimate_constant_p (dstmode, y))
3401 oldcost = rtx_cost (y, SET, speed);
3403 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3405 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3406 srcmode != orig_srcmode;
3407 srcmode = GET_MODE_WIDER_MODE (srcmode))
3410 rtx trunc_y, last_insn;
3412 /* Skip if the target can't extend this way. */
3413 ic = can_extend_p (dstmode, srcmode, 0);
3414 if (ic == CODE_FOR_nothing)
3417 /* Skip if the narrowed value isn't exact. */
3418 if (! exact_real_truncate (srcmode, &r))
3421 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3423 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3425 /* Skip if the target needs extra instructions to perform
3427 if (!insn_operand_matches (ic, 1, trunc_y))
3429 /* This is valid, but may not be cheaper than the original. */
3430 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3431 if (oldcost < newcost)
3434 else if (float_extend_from_mem[dstmode][srcmode])
3436 trunc_y = force_const_mem (srcmode, trunc_y);
3437 /* This is valid, but may not be cheaper than the original. */
3438 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3439 if (oldcost < newcost)
3441 trunc_y = validize_mem (trunc_y);
3446 /* For CSE's benefit, force the compressed constant pool entry
3447 into a new pseudo. This constant may be used in different modes,
3448 and if not, combine will put things back together for us. */
3449 trunc_y = force_reg (srcmode, trunc_y);
3450 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3451 last_insn = get_last_insn ();
3454 set_unique_reg_note (last_insn, REG_EQUAL, y);
3462 /* Pushing data onto the stack. */
3464 /* Push a block of length SIZE (perhaps variable)
3465 and return an rtx to address the beginning of the block.
3466 The value may be virtual_outgoing_args_rtx.
3468 EXTRA is the number of bytes of padding to push in addition to SIZE.
3469 BELOW nonzero means this padding comes at low addresses;
3470 otherwise, the padding comes at high addresses. */
3473 push_block (rtx size, int extra, int below)
3477 size = convert_modes (Pmode, ptr_mode, size, 1);
3478 if (CONSTANT_P (size))
3479 anti_adjust_stack (plus_constant (size, extra));
3480 else if (REG_P (size) && extra == 0)
3481 anti_adjust_stack (size);
3484 temp = copy_to_mode_reg (Pmode, size);
3486 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3487 temp, 0, OPTAB_LIB_WIDEN);
3488 anti_adjust_stack (temp);
3491 #ifndef STACK_GROWS_DOWNWARD
3497 temp = virtual_outgoing_args_rtx;
3498 if (extra != 0 && below)
3499 temp = plus_constant (temp, extra);
3503 if (CONST_INT_P (size))
3504 temp = plus_constant (virtual_outgoing_args_rtx,
3505 -INTVAL (size) - (below ? 0 : extra));
3506 else if (extra != 0 && !below)
3507 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3508 negate_rtx (Pmode, plus_constant (size, extra)));
3510 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3511 negate_rtx (Pmode, size));
3514 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3517 #ifdef PUSH_ROUNDING
3519 /* Emit single push insn. */
3522 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3525 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3527 enum insn_code icode;
3529 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3530 /* If there is push pattern, use it. Otherwise try old way of throwing
3531 MEM representing push operation to move expander. */
3532 icode = optab_handler (push_optab, mode);
3533 if (icode != CODE_FOR_nothing)
3535 struct expand_operand ops[1];
3537 create_input_operand (&ops[0], x, mode);
3538 if (maybe_expand_insn (icode, 1, ops))
3541 if (GET_MODE_SIZE (mode) == rounded_size)
3542 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3543 /* If we are to pad downward, adjust the stack pointer first and
3544 then store X into the stack location using an offset. This is
3545 because emit_move_insn does not know how to pad; it does not have
3547 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3549 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3550 HOST_WIDE_INT offset;
3552 emit_move_insn (stack_pointer_rtx,
3553 expand_binop (Pmode,
3554 #ifdef STACK_GROWS_DOWNWARD
3560 GEN_INT (rounded_size),
3561 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3563 offset = (HOST_WIDE_INT) padding_size;
3564 #ifdef STACK_GROWS_DOWNWARD
3565 if (STACK_PUSH_CODE == POST_DEC)
3566 /* We have already decremented the stack pointer, so get the
3568 offset += (HOST_WIDE_INT) rounded_size;
3570 if (STACK_PUSH_CODE == POST_INC)
3571 /* We have already incremented the stack pointer, so get the
3573 offset -= (HOST_WIDE_INT) rounded_size;
3575 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3579 #ifdef STACK_GROWS_DOWNWARD
3580 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3581 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3582 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3584 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3585 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3586 GEN_INT (rounded_size));
3588 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3591 dest = gen_rtx_MEM (mode, dest_addr);
3595 set_mem_attributes (dest, type, 1);
3597 if (flag_optimize_sibling_calls)
3598 /* Function incoming arguments may overlap with sibling call
3599 outgoing arguments and we cannot allow reordering of reads
3600 from function arguments with stores to outgoing arguments
3601 of sibling calls. */
3602 set_mem_alias_set (dest, 0);
3604 emit_move_insn (dest, x);
3608 /* Generate code to push X onto the stack, assuming it has mode MODE and
3610 MODE is redundant except when X is a CONST_INT (since they don't
3612 SIZE is an rtx for the size of data to be copied (in bytes),
3613 needed only if X is BLKmode.
3615 ALIGN (in bits) is maximum alignment we can assume.
3617 If PARTIAL and REG are both nonzero, then copy that many of the first
3618 bytes of X into registers starting with REG, and push the rest of X.
3619 The amount of space pushed is decreased by PARTIAL bytes.
3620 REG must be a hard register in this case.
3621 If REG is zero but PARTIAL is not, take any all others actions for an
3622 argument partially in registers, but do not actually load any
3625 EXTRA is the amount in bytes of extra space to leave next to this arg.
3626 This is ignored if an argument block has already been allocated.
3628 On a machine that lacks real push insns, ARGS_ADDR is the address of
3629 the bottom of the argument block for this call. We use indexing off there
3630 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3631 argument block has not been preallocated.
3633 ARGS_SO_FAR is the size of args previously pushed for this call.
3635 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3636 for arguments passed in registers. If nonzero, it will be the number
3637 of bytes required. */
3640 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3641 unsigned int align, int partial, rtx reg, int extra,
3642 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3646 enum direction stack_direction
3647 #ifdef STACK_GROWS_DOWNWARD
3653 /* Decide where to pad the argument: `downward' for below,
3654 `upward' for above, or `none' for don't pad it.
3655 Default is below for small data on big-endian machines; else above. */
3656 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3658 /* Invert direction if stack is post-decrement.
3660 if (STACK_PUSH_CODE == POST_DEC)
3661 if (where_pad != none)
3662 where_pad = (where_pad == downward ? upward : downward);
3667 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3669 /* Copy a block into the stack, entirely or partially. */
3676 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3677 used = partial - offset;
3679 if (mode != BLKmode)
3681 /* A value is to be stored in an insufficiently aligned
3682 stack slot; copy via a suitably aligned slot if
3684 size = GEN_INT (GET_MODE_SIZE (mode));
3685 if (!MEM_P (xinner))
3687 temp = assign_temp (type, 0, 1, 1);
3688 emit_move_insn (temp, xinner);
3695 /* USED is now the # of bytes we need not copy to the stack
3696 because registers will take care of them. */
3699 xinner = adjust_address (xinner, BLKmode, used);
3701 /* If the partial register-part of the arg counts in its stack size,
3702 skip the part of stack space corresponding to the registers.
3703 Otherwise, start copying to the beginning of the stack space,
3704 by setting SKIP to 0. */
3705 skip = (reg_parm_stack_space == 0) ? 0 : used;
3707 #ifdef PUSH_ROUNDING
3708 /* Do it with several push insns if that doesn't take lots of insns
3709 and if there is no difficulty with push insns that skip bytes
3710 on the stack for alignment purposes. */
3713 && CONST_INT_P (size)
3715 && MEM_ALIGN (xinner) >= align
3716 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3717 /* Here we avoid the case of a structure whose weak alignment
3718 forces many pushes of a small amount of data,
3719 and such small pushes do rounding that causes trouble. */
3720 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3721 || align >= BIGGEST_ALIGNMENT
3722 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3723 == (align / BITS_PER_UNIT)))
3724 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3726 /* Push padding now if padding above and stack grows down,
3727 or if padding below and stack grows up.
3728 But if space already allocated, this has already been done. */
3729 if (extra && args_addr == 0
3730 && where_pad != none && where_pad != stack_direction)
3731 anti_adjust_stack (GEN_INT (extra));
3733 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3736 #endif /* PUSH_ROUNDING */
3740 /* Otherwise make space on the stack and copy the data
3741 to the address of that space. */
3743 /* Deduct words put into registers from the size we must copy. */
3746 if (CONST_INT_P (size))
3747 size = GEN_INT (INTVAL (size) - used);
3749 size = expand_binop (GET_MODE (size), sub_optab, size,
3750 GEN_INT (used), NULL_RTX, 0,
3754 /* Get the address of the stack space.
3755 In this case, we do not deal with EXTRA separately.
3756 A single stack adjust will do. */
3759 temp = push_block (size, extra, where_pad == downward);
3762 else if (CONST_INT_P (args_so_far))
3763 temp = memory_address (BLKmode,
3764 plus_constant (args_addr,
3765 skip + INTVAL (args_so_far)));
3767 temp = memory_address (BLKmode,
3768 plus_constant (gen_rtx_PLUS (Pmode,
3773 if (!ACCUMULATE_OUTGOING_ARGS)
3775 /* If the source is referenced relative to the stack pointer,
3776 copy it to another register to stabilize it. We do not need
3777 to do this if we know that we won't be changing sp. */
3779 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3780 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3781 temp = copy_to_reg (temp);
3784 target = gen_rtx_MEM (BLKmode, temp);
3786 /* We do *not* set_mem_attributes here, because incoming arguments
3787 may overlap with sibling call outgoing arguments and we cannot
3788 allow reordering of reads from function arguments with stores
3789 to outgoing arguments of sibling calls. We do, however, want
3790 to record the alignment of the stack slot. */
3791 /* ALIGN may well be better aligned than TYPE, e.g. due to
3792 PARM_BOUNDARY. Assume the caller isn't lying. */
3793 set_mem_align (target, align);
3795 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3798 else if (partial > 0)
3800 /* Scalar partly in registers. */
3802 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3805 /* # bytes of start of argument
3806 that we must make space for but need not store. */
3807 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3808 int args_offset = INTVAL (args_so_far);
3811 /* Push padding now if padding above and stack grows down,
3812 or if padding below and stack grows up.
3813 But if space already allocated, this has already been done. */
3814 if (extra && args_addr == 0
3815 && where_pad != none && where_pad != stack_direction)
3816 anti_adjust_stack (GEN_INT (extra));
3818 /* If we make space by pushing it, we might as well push
3819 the real data. Otherwise, we can leave OFFSET nonzero
3820 and leave the space uninitialized. */
3824 /* Now NOT_STACK gets the number of words that we don't need to
3825 allocate on the stack. Convert OFFSET to words too. */
3826 not_stack = (partial - offset) / UNITS_PER_WORD;
3827 offset /= UNITS_PER_WORD;
3829 /* If the partial register-part of the arg counts in its stack size,
3830 skip the part of stack space corresponding to the registers.
3831 Otherwise, start copying to the beginning of the stack space,
3832 by setting SKIP to 0. */
3833 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3835 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
3836 x = validize_mem (force_const_mem (mode, x));
3838 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3839 SUBREGs of such registers are not allowed. */
3840 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3841 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3842 x = copy_to_reg (x);
3844 /* Loop over all the words allocated on the stack for this arg. */
3845 /* We can do it by words, because any scalar bigger than a word
3846 has a size a multiple of a word. */
3847 #ifndef PUSH_ARGS_REVERSED
3848 for (i = not_stack; i < size; i++)
3850 for (i = size - 1; i >= not_stack; i--)
3852 if (i >= not_stack + offset)
3853 emit_push_insn (operand_subword_force (x, i, mode),
3854 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3856 GEN_INT (args_offset + ((i - not_stack + skip)
3858 reg_parm_stack_space, alignment_pad);
3865 /* Push padding now if padding above and stack grows down,
3866 or if padding below and stack grows up.
3867 But if space already allocated, this has already been done. */
3868 if (extra && args_addr == 0
3869 && where_pad != none && where_pad != stack_direction)
3870 anti_adjust_stack (GEN_INT (extra));
3872 #ifdef PUSH_ROUNDING
3873 if (args_addr == 0 && PUSH_ARGS)
3874 emit_single_push_insn (mode, x, type);
3878 if (CONST_INT_P (args_so_far))
3880 = memory_address (mode,
3881 plus_constant (args_addr,
3882 INTVAL (args_so_far)));
3884 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3886 dest = gen_rtx_MEM (mode, addr);
3888 /* We do *not* set_mem_attributes here, because incoming arguments
3889 may overlap with sibling call outgoing arguments and we cannot
3890 allow reordering of reads from function arguments with stores
3891 to outgoing arguments of sibling calls. We do, however, want
3892 to record the alignment of the stack slot. */
3893 /* ALIGN may well be better aligned than TYPE, e.g. due to
3894 PARM_BOUNDARY. Assume the caller isn't lying. */
3895 set_mem_align (dest, align);
3897 emit_move_insn (dest, x);
3901 /* If part should go in registers, copy that part
3902 into the appropriate registers. Do this now, at the end,
3903 since mem-to-mem copies above may do function calls. */
3904 if (partial > 0 && reg != 0)
3906 /* Handle calls that pass values in multiple non-contiguous locations.
3907 The Irix 6 ABI has examples of this. */
3908 if (GET_CODE (reg) == PARALLEL)
3909 emit_group_load (reg, x, type, -1);
3912 gcc_assert (partial % UNITS_PER_WORD == 0);
3913 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3917 if (extra && args_addr == 0 && where_pad == stack_direction)
3918 anti_adjust_stack (GEN_INT (extra));
3920 if (alignment_pad && args_addr == 0)
3921 anti_adjust_stack (alignment_pad);
3924 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3928 get_subtarget (rtx x)
3932 /* Only registers can be subtargets. */
3934 /* Don't use hard regs to avoid extending their life. */
3935 || REGNO (x) < FIRST_PSEUDO_REGISTER
3939 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3940 FIELD is a bitfield. Returns true if the optimization was successful,
3941 and there's nothing else to do. */
3944 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3945 unsigned HOST_WIDE_INT bitpos,
3946 enum machine_mode mode1, rtx str_rtx,
3949 enum machine_mode str_mode = GET_MODE (str_rtx);
3950 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3955 enum tree_code code;
3957 if (mode1 != VOIDmode
3958 || bitsize >= BITS_PER_WORD
3959 || str_bitsize > BITS_PER_WORD
3960 || TREE_SIDE_EFFECTS (to)
3961 || TREE_THIS_VOLATILE (to))
3965 if (TREE_CODE (src) != SSA_NAME)
3967 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3970 srcstmt = get_gimple_for_ssa_name (src);
3972 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
3975 code = gimple_assign_rhs_code (srcstmt);
3977 op0 = gimple_assign_rhs1 (srcstmt);
3979 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
3980 to find its initialization. Hopefully the initialization will
3981 be from a bitfield load. */
3982 if (TREE_CODE (op0) == SSA_NAME)
3984 gimple op0stmt = get_gimple_for_ssa_name (op0);
3986 /* We want to eventually have OP0 be the same as TO, which
3987 should be a bitfield. */
3989 || !is_gimple_assign (op0stmt)
3990 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
3992 op0 = gimple_assign_rhs1 (op0stmt);
3995 op1 = gimple_assign_rhs2 (srcstmt);
3997 if (!operand_equal_p (to, op0, 0))
4000 if (MEM_P (str_rtx))
4002 unsigned HOST_WIDE_INT offset1;
4004 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4005 str_mode = word_mode;
4006 str_mode = get_best_mode (bitsize, bitpos,
4007 MEM_ALIGN (str_rtx), str_mode, 0);
4008 if (str_mode == VOIDmode)
4010 str_bitsize = GET_MODE_BITSIZE (str_mode);
4013 bitpos %= str_bitsize;
4014 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4015 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4017 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4020 /* If the bit field covers the whole REG/MEM, store_field
4021 will likely generate better code. */
4022 if (bitsize >= str_bitsize)
4025 /* We can't handle fields split across multiple entities. */
4026 if (bitpos + bitsize > str_bitsize)
4029 if (BYTES_BIG_ENDIAN)
4030 bitpos = str_bitsize - bitpos - bitsize;
4036 /* For now, just optimize the case of the topmost bitfield
4037 where we don't need to do any masking and also
4038 1 bit bitfields where xor can be used.
4039 We might win by one instruction for the other bitfields
4040 too if insv/extv instructions aren't used, so that
4041 can be added later. */
4042 if (bitpos + bitsize != str_bitsize
4043 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4046 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4047 value = convert_modes (str_mode,
4048 TYPE_MODE (TREE_TYPE (op1)), value,
4049 TYPE_UNSIGNED (TREE_TYPE (op1)));
4051 /* We may be accessing data outside the field, which means
4052 we can alias adjacent data. */
4053 if (MEM_P (str_rtx))
4055 str_rtx = shallow_copy_rtx (str_rtx);
4056 set_mem_alias_set (str_rtx, 0);
4057 set_mem_expr (str_rtx, 0);
4060 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4061 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4063 value = expand_and (str_mode, value, const1_rtx, NULL);
4066 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4067 bitpos, NULL_RTX, 1);
4068 result = expand_binop (str_mode, binop, str_rtx,
4069 value, str_rtx, 1, OPTAB_WIDEN);
4070 if (result != str_rtx)
4071 emit_move_insn (str_rtx, result);
4076 if (TREE_CODE (op1) != INTEGER_CST)
4078 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4079 value = convert_modes (GET_MODE (str_rtx),
4080 TYPE_MODE (TREE_TYPE (op1)), value,
4081 TYPE_UNSIGNED (TREE_TYPE (op1)));
4083 /* We may be accessing data outside the field, which means
4084 we can alias adjacent data. */
4085 if (MEM_P (str_rtx))
4087 str_rtx = shallow_copy_rtx (str_rtx);
4088 set_mem_alias_set (str_rtx, 0);
4089 set_mem_expr (str_rtx, 0);
4092 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4093 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4095 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4097 value = expand_and (GET_MODE (str_rtx), value, mask,
4100 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4101 bitpos, NULL_RTX, 1);
4102 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4103 value, str_rtx, 1, OPTAB_WIDEN);
4104 if (result != str_rtx)
4105 emit_move_insn (str_rtx, result);
4116 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4117 is true, try generating a nontemporal store. */
4120 expand_assignment (tree to, tree from, bool nontemporal)
4124 enum machine_mode mode;
4126 enum insn_code icode;
4128 /* Don't crash if the lhs of the assignment was erroneous. */
4129 if (TREE_CODE (to) == ERROR_MARK)
4131 expand_normal (from);
4135 /* Optimize away no-op moves without side-effects. */
4136 if (operand_equal_p (to, from, 0))
4139 mode = TYPE_MODE (TREE_TYPE (to));
4140 if ((TREE_CODE (to) == MEM_REF
4141 || TREE_CODE (to) == TARGET_MEM_REF)
4143 && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
4144 get_object_alignment (to, BIGGEST_ALIGNMENT)))
4145 < (signed) GET_MODE_ALIGNMENT (mode))
4146 && ((icode = optab_handler (movmisalign_optab, mode))
4147 != CODE_FOR_nothing))
4149 struct expand_operand ops[2];
4150 enum machine_mode address_mode;
4153 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4154 reg = force_not_mem (reg);
4156 if (TREE_CODE (to) == MEM_REF)
4159 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
4160 tree base = TREE_OPERAND (to, 0);
4161 address_mode = targetm.addr_space.address_mode (as);
4162 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4163 op0 = convert_memory_address_addr_space (address_mode, op0, as);
4164 if (!integer_zerop (TREE_OPERAND (to, 1)))
4167 = immed_double_int_const (mem_ref_offset (to), address_mode);
4168 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4170 op0 = memory_address_addr_space (mode, op0, as);
4171 mem = gen_rtx_MEM (mode, op0);
4172 set_mem_attributes (mem, to, 0);
4173 set_mem_addr_space (mem, as);
4175 else if (TREE_CODE (to) == TARGET_MEM_REF)
4177 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
4178 struct mem_address addr;
4180 get_address_description (to, &addr);
4181 op0 = addr_for_mem_ref (&addr, as, true);
4182 op0 = memory_address_addr_space (mode, op0, as);
4183 mem = gen_rtx_MEM (mode, op0);
4184 set_mem_attributes (mem, to, 0);
4185 set_mem_addr_space (mem, as);
4189 if (TREE_THIS_VOLATILE (to))
4190 MEM_VOLATILE_P (mem) = 1;
4192 create_fixed_operand (&ops[0], mem);
4193 create_input_operand (&ops[1], reg, mode);
4194 /* The movmisalign<mode> pattern cannot fail, else the assignment would
4195 silently be omitted. */
4196 expand_insn (icode, 2, ops);
4200 /* Assignment of a structure component needs special treatment
4201 if the structure component's rtx is not simply a MEM.
4202 Assignment of an array element at a constant index, and assignment of
4203 an array element in an unaligned packed structure field, has the same
4205 if (handled_component_p (to)
4206 /* ??? We only need to handle MEM_REF here if the access is not
4207 a full access of the base object. */
4208 || (TREE_CODE (to) == MEM_REF
4209 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4210 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4212 enum machine_mode mode1;
4213 HOST_WIDE_INT bitsize, bitpos;
4220 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4221 &unsignedp, &volatilep, true);
4223 /* If we are going to use store_bit_field and extract_bit_field,
4224 make sure to_rtx will be safe for multiple use. */
4226 to_rtx = expand_normal (tem);
4228 /* If the bitfield is volatile, we want to access it in the
4229 field's mode, not the computed mode.
4230 If a MEM has VOIDmode (external with incomplete type),
4231 use BLKmode for it instead. */
4234 if (volatilep && flag_strict_volatile_bitfields > 0)
4235 to_rtx = adjust_address (to_rtx, mode1, 0);
4236 else if (GET_MODE (to_rtx) == VOIDmode)
4237 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4242 enum machine_mode address_mode;
4245 if (!MEM_P (to_rtx))
4247 /* We can get constant negative offsets into arrays with broken
4248 user code. Translate this to a trap instead of ICEing. */
4249 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4250 expand_builtin_trap ();
4251 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4254 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4256 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4257 if (GET_MODE (offset_rtx) != address_mode)
4258 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4260 /* A constant address in TO_RTX can have VOIDmode, we must not try
4261 to call force_reg for that case. Avoid that case. */
4263 && GET_MODE (to_rtx) == BLKmode
4264 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4266 && (bitpos % bitsize) == 0
4267 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4268 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4270 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4274 to_rtx = offset_address (to_rtx, offset_rtx,
4275 highest_pow2_factor_for_target (to,
4279 /* No action is needed if the target is not a memory and the field
4280 lies completely outside that target. This can occur if the source
4281 code contains an out-of-bounds access to a small array. */
4283 && GET_MODE (to_rtx) != BLKmode
4284 && (unsigned HOST_WIDE_INT) bitpos
4285 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4287 expand_normal (from);
4290 /* Handle expand_expr of a complex value returning a CONCAT. */
4291 else if (GET_CODE (to_rtx) == CONCAT)
4293 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4294 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4296 && bitsize == mode_bitsize)
4297 result = store_expr (from, to_rtx, false, nontemporal);
4298 else if (bitsize == mode_bitsize / 2
4299 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4300 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4302 else if (bitpos + bitsize <= mode_bitsize / 2)
4303 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4304 mode1, from, TREE_TYPE (tem),
4305 get_alias_set (to), nontemporal);
4306 else if (bitpos >= mode_bitsize / 2)
4307 result = store_field (XEXP (to_rtx, 1), bitsize,
4308 bitpos - mode_bitsize / 2, mode1, from,
4309 TREE_TYPE (tem), get_alias_set (to),
4311 else if (bitpos == 0 && bitsize == mode_bitsize)
4314 result = expand_normal (from);
4315 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4316 TYPE_MODE (TREE_TYPE (from)), 0);
4317 emit_move_insn (XEXP (to_rtx, 0),
4318 read_complex_part (from_rtx, false));
4319 emit_move_insn (XEXP (to_rtx, 1),
4320 read_complex_part (from_rtx, true));
4324 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4325 GET_MODE_SIZE (GET_MODE (to_rtx)),
4327 write_complex_part (temp, XEXP (to_rtx, 0), false);
4328 write_complex_part (temp, XEXP (to_rtx, 1), true);
4329 result = store_field (temp, bitsize, bitpos, mode1, from,
4330 TREE_TYPE (tem), get_alias_set (to),
4332 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4333 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4340 /* If the field is at offset zero, we could have been given the
4341 DECL_RTX of the parent struct. Don't munge it. */
4342 to_rtx = shallow_copy_rtx (to_rtx);
4344 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4346 /* Deal with volatile and readonly fields. The former is only
4347 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4349 MEM_VOLATILE_P (to_rtx) = 1;
4350 if (component_uses_parent_alias_set (to))
4351 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4354 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4358 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4359 TREE_TYPE (tem), get_alias_set (to),
4364 preserve_temp_slots (result);
4370 /* If the rhs is a function call and its value is not an aggregate,
4371 call the function before we start to compute the lhs.
4372 This is needed for correct code for cases such as
4373 val = setjmp (buf) on machines where reference to val
4374 requires loading up part of an address in a separate insn.
4376 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4377 since it might be a promoted variable where the zero- or sign- extension
4378 needs to be done. Handling this in the normal way is safe because no
4379 computation is done before the call. The same is true for SSA names. */
4380 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4381 && COMPLETE_TYPE_P (TREE_TYPE (from))
4382 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4383 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4384 && REG_P (DECL_RTL (to)))
4385 || TREE_CODE (to) == SSA_NAME))
4390 value = expand_normal (from);
4392 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4394 /* Handle calls that return values in multiple non-contiguous locations.
4395 The Irix 6 ABI has examples of this. */
4396 if (GET_CODE (to_rtx) == PARALLEL)
4397 emit_group_load (to_rtx, value, TREE_TYPE (from),
4398 int_size_in_bytes (TREE_TYPE (from)));
4399 else if (GET_MODE (to_rtx) == BLKmode)
4400 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4403 if (POINTER_TYPE_P (TREE_TYPE (to)))
4404 value = convert_memory_address_addr_space
4405 (GET_MODE (to_rtx), value,
4406 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4408 emit_move_insn (to_rtx, value);
4410 preserve_temp_slots (to_rtx);
4416 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4417 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4420 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4422 /* Don't move directly into a return register. */
4423 if (TREE_CODE (to) == RESULT_DECL
4424 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4429 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4431 if (GET_CODE (to_rtx) == PARALLEL)
4432 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4433 int_size_in_bytes (TREE_TYPE (from)));
4435 emit_move_insn (to_rtx, temp);
4437 preserve_temp_slots (to_rtx);
4443 /* In case we are returning the contents of an object which overlaps
4444 the place the value is being stored, use a safe function when copying
4445 a value through a pointer into a structure value return block. */
4446 if (TREE_CODE (to) == RESULT_DECL
4447 && TREE_CODE (from) == INDIRECT_REF
4448 && ADDR_SPACE_GENERIC_P
4449 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4450 && refs_may_alias_p (to, from)
4451 && cfun->returns_struct
4452 && !cfun->returns_pcc_struct)
4457 size = expr_size (from);
4458 from_rtx = expand_normal (from);
4460 emit_library_call (memmove_libfunc, LCT_NORMAL,
4461 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4462 XEXP (from_rtx, 0), Pmode,
4463 convert_to_mode (TYPE_MODE (sizetype),
4464 size, TYPE_UNSIGNED (sizetype)),
4465 TYPE_MODE (sizetype));
4467 preserve_temp_slots (to_rtx);
4473 /* Compute FROM and store the value in the rtx we got. */
4476 result = store_expr (from, to_rtx, 0, nontemporal);
4477 preserve_temp_slots (result);
4483 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4484 succeeded, false otherwise. */
4487 emit_storent_insn (rtx to, rtx from)
4489 struct expand_operand ops[2];
4490 enum machine_mode mode = GET_MODE (to);
4491 enum insn_code code = optab_handler (storent_optab, mode);
4493 if (code == CODE_FOR_nothing)
4496 create_fixed_operand (&ops[0], to);
4497 create_input_operand (&ops[1], from, mode);
4498 return maybe_expand_insn (code, 2, ops);
4501 /* Generate code for computing expression EXP,
4502 and storing the value into TARGET.
4504 If the mode is BLKmode then we may return TARGET itself.
4505 It turns out that in BLKmode it doesn't cause a problem.
4506 because C has no operators that could combine two different
4507 assignments into the same BLKmode object with different values
4508 with no sequence point. Will other languages need this to
4511 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4512 stack, and block moves may need to be treated specially.
4514 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4517 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4520 rtx alt_rtl = NULL_RTX;
4521 location_t loc = EXPR_LOCATION (exp);
4523 if (VOID_TYPE_P (TREE_TYPE (exp)))
4525 /* C++ can generate ?: expressions with a throw expression in one
4526 branch and an rvalue in the other. Here, we resolve attempts to
4527 store the throw expression's nonexistent result. */
4528 gcc_assert (!call_param_p);
4529 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4532 if (TREE_CODE (exp) == COMPOUND_EXPR)
4534 /* Perform first part of compound expression, then assign from second
4536 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4537 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4538 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4541 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4543 /* For conditional expression, get safe form of the target. Then
4544 test the condition, doing the appropriate assignment on either
4545 side. This avoids the creation of unnecessary temporaries.
4546 For non-BLKmode, it is more efficient not to do this. */
4548 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4550 do_pending_stack_adjust ();
4552 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4553 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4555 emit_jump_insn (gen_jump (lab2));
4558 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4565 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4566 /* If this is a scalar in a register that is stored in a wider mode
4567 than the declared mode, compute the result into its declared mode
4568 and then convert to the wider mode. Our value is the computed
4571 rtx inner_target = 0;
4573 /* We can do the conversion inside EXP, which will often result
4574 in some optimizations. Do the conversion in two steps: first
4575 change the signedness, if needed, then the extend. But don't
4576 do this if the type of EXP is a subtype of something else
4577 since then the conversion might involve more than just
4578 converting modes. */
4579 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4580 && TREE_TYPE (TREE_TYPE (exp)) == 0
4581 && GET_MODE_PRECISION (GET_MODE (target))
4582 == TYPE_PRECISION (TREE_TYPE (exp)))
4584 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4585 != SUBREG_PROMOTED_UNSIGNED_P (target))
4587 /* Some types, e.g. Fortran's logical*4, won't have a signed
4588 version, so use the mode instead. */
4590 = (signed_or_unsigned_type_for
4591 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4593 ntype = lang_hooks.types.type_for_mode
4594 (TYPE_MODE (TREE_TYPE (exp)),
4595 SUBREG_PROMOTED_UNSIGNED_P (target));
4597 exp = fold_convert_loc (loc, ntype, exp);
4600 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4601 (GET_MODE (SUBREG_REG (target)),
4602 SUBREG_PROMOTED_UNSIGNED_P (target)),
4605 inner_target = SUBREG_REG (target);
4608 temp = expand_expr (exp, inner_target, VOIDmode,
4609 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4611 /* If TEMP is a VOIDmode constant, use convert_modes to make
4612 sure that we properly convert it. */
4613 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4615 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4616 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4617 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4618 GET_MODE (target), temp,
4619 SUBREG_PROMOTED_UNSIGNED_P (target));
4622 convert_move (SUBREG_REG (target), temp,
4623 SUBREG_PROMOTED_UNSIGNED_P (target));
4627 else if ((TREE_CODE (exp) == STRING_CST
4628 || (TREE_CODE (exp) == MEM_REF
4629 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4630 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4632 && integer_zerop (TREE_OPERAND (exp, 1))))
4633 && !nontemporal && !call_param_p
4636 /* Optimize initialization of an array with a STRING_CST. */
4637 HOST_WIDE_INT exp_len, str_copy_len;
4639 tree str = TREE_CODE (exp) == STRING_CST
4640 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4642 exp_len = int_expr_size (exp);
4646 if (TREE_STRING_LENGTH (str) <= 0)
4649 str_copy_len = strlen (TREE_STRING_POINTER (str));
4650 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4653 str_copy_len = TREE_STRING_LENGTH (str);
4654 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
4655 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
4657 str_copy_len += STORE_MAX_PIECES - 1;
4658 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4660 str_copy_len = MIN (str_copy_len, exp_len);
4661 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4662 CONST_CAST (char *, TREE_STRING_POINTER (str)),
4663 MEM_ALIGN (target), false))
4668 dest_mem = store_by_pieces (dest_mem,
4669 str_copy_len, builtin_strncpy_read_str,
4671 TREE_STRING_POINTER (str)),
4672 MEM_ALIGN (target), false,
4673 exp_len > str_copy_len ? 1 : 0);
4674 if (exp_len > str_copy_len)
4675 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4676 GEN_INT (exp_len - str_copy_len),
4685 /* If we want to use a nontemporal store, force the value to
4687 tmp_target = nontemporal ? NULL_RTX : target;
4688 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4690 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4694 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4695 the same as that of TARGET, adjust the constant. This is needed, for
4696 example, in case it is a CONST_DOUBLE and we want only a word-sized
4698 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4699 && TREE_CODE (exp) != ERROR_MARK
4700 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4701 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4702 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4704 /* If value was not generated in the target, store it there.
4705 Convert the value to TARGET's type first if necessary and emit the
4706 pending incrementations that have been queued when expanding EXP.
4707 Note that we cannot emit the whole queue blindly because this will
4708 effectively disable the POST_INC optimization later.
4710 If TEMP and TARGET compare equal according to rtx_equal_p, but
4711 one or both of them are volatile memory refs, we have to distinguish
4713 - expand_expr has used TARGET. In this case, we must not generate
4714 another copy. This can be detected by TARGET being equal according
4716 - expand_expr has not used TARGET - that means that the source just
4717 happens to have the same RTX form. Since temp will have been created
4718 by expand_expr, it will compare unequal according to == .
4719 We must generate a copy in this case, to reach the correct number
4720 of volatile memory references. */
4722 if ((! rtx_equal_p (temp, target)
4723 || (temp != target && (side_effects_p (temp)
4724 || side_effects_p (target))))
4725 && TREE_CODE (exp) != ERROR_MARK
4726 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4727 but TARGET is not valid memory reference, TEMP will differ
4728 from TARGET although it is really the same location. */
4730 && rtx_equal_p (alt_rtl, target)
4731 && !side_effects_p (alt_rtl)
4732 && !side_effects_p (target))
4733 /* If there's nothing to copy, don't bother. Don't call
4734 expr_size unless necessary, because some front-ends (C++)
4735 expr_size-hook must not be given objects that are not
4736 supposed to be bit-copied or bit-initialized. */
4737 && expr_size (exp) != const0_rtx)
4739 if (GET_MODE (temp) != GET_MODE (target)
4740 && GET_MODE (temp) != VOIDmode)
4742 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4743 if (GET_MODE (target) == BLKmode
4744 && GET_MODE (temp) == BLKmode)
4745 emit_block_move (target, temp, expr_size (exp),
4747 ? BLOCK_OP_CALL_PARM
4748 : BLOCK_OP_NORMAL));
4749 else if (GET_MODE (target) == BLKmode)
4750 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
4751 0, GET_MODE (temp), temp);
4753 convert_move (target, temp, unsignedp);
4756 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4758 /* Handle copying a string constant into an array. The string
4759 constant may be shorter than the array. So copy just the string's
4760 actual length, and clear the rest. First get the size of the data
4761 type of the string, which is actually the size of the target. */
4762 rtx size = expr_size (exp);
4764 if (CONST_INT_P (size)
4765 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4766 emit_block_move (target, temp, size,
4768 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4771 enum machine_mode pointer_mode
4772 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4773 enum machine_mode address_mode
4774 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4776 /* Compute the size of the data to copy from the string. */
4778 = size_binop_loc (loc, MIN_EXPR,
4779 make_tree (sizetype, size),
4780 size_int (TREE_STRING_LENGTH (exp)));
4782 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4784 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4787 /* Copy that much. */
4788 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4789 TYPE_UNSIGNED (sizetype));
4790 emit_block_move (target, temp, copy_size_rtx,
4792 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4794 /* Figure out how much is left in TARGET that we have to clear.
4795 Do all calculations in pointer_mode. */
4796 if (CONST_INT_P (copy_size_rtx))
4798 size = plus_constant (size, -INTVAL (copy_size_rtx));
4799 target = adjust_address (target, BLKmode,
4800 INTVAL (copy_size_rtx));
4804 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4805 copy_size_rtx, NULL_RTX, 0,
4808 if (GET_MODE (copy_size_rtx) != address_mode)
4809 copy_size_rtx = convert_to_mode (address_mode,
4811 TYPE_UNSIGNED (sizetype));
4813 target = offset_address (target, copy_size_rtx,
4814 highest_pow2_factor (copy_size));
4815 label = gen_label_rtx ();
4816 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4817 GET_MODE (size), 0, label);
4820 if (size != const0_rtx)
4821 clear_storage (target, size, BLOCK_OP_NORMAL);
4827 /* Handle calls that return values in multiple non-contiguous locations.
4828 The Irix 6 ABI has examples of this. */
4829 else if (GET_CODE (target) == PARALLEL)
4830 emit_group_load (target, temp, TREE_TYPE (exp),
4831 int_size_in_bytes (TREE_TYPE (exp)));
4832 else if (GET_MODE (temp) == BLKmode)
4833 emit_block_move (target, temp, expr_size (exp),
4835 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4836 else if (nontemporal
4837 && emit_storent_insn (target, temp))
4838 /* If we managed to emit a nontemporal store, there is nothing else to
4843 temp = force_operand (temp, target);
4845 emit_move_insn (target, temp);
4852 /* Helper for categorize_ctor_elements. Identical interface. */
4855 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4856 HOST_WIDE_INT *p_elt_count,
4859 unsigned HOST_WIDE_INT idx;
4860 HOST_WIDE_INT nz_elts, elt_count;
4861 tree value, purpose;
4863 /* Whether CTOR is a valid constant initializer, in accordance with what
4864 initializer_constant_valid_p does. If inferred from the constructor
4865 elements, true until proven otherwise. */
4866 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4867 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4872 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4874 HOST_WIDE_INT mult = 1;
4876 if (TREE_CODE (purpose) == RANGE_EXPR)
4878 tree lo_index = TREE_OPERAND (purpose, 0);
4879 tree hi_index = TREE_OPERAND (purpose, 1);
4881 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4882 mult = (tree_low_cst (hi_index, 1)
4883 - tree_low_cst (lo_index, 1) + 1);
4886 switch (TREE_CODE (value))
4890 HOST_WIDE_INT nz = 0, ic = 0;
4893 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4895 nz_elts += mult * nz;
4896 elt_count += mult * ic;
4898 if (const_from_elts_p && const_p)
4899 const_p = const_elt_p;
4906 if (!initializer_zerop (value))
4912 nz_elts += mult * TREE_STRING_LENGTH (value);
4913 elt_count += mult * TREE_STRING_LENGTH (value);
4917 if (!initializer_zerop (TREE_REALPART (value)))
4919 if (!initializer_zerop (TREE_IMAGPART (value)))
4927 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4929 if (!initializer_zerop (TREE_VALUE (v)))
4938 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4941 nz_elts += mult * tc;
4942 elt_count += mult * tc;
4944 if (const_from_elts_p && const_p)
4945 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4953 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4954 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4957 bool clear_this = true;
4959 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4961 /* We don't expect more than one element of the union to be
4962 initialized. Not sure what we should do otherwise... */
4963 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4966 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4967 CONSTRUCTOR_ELTS (ctor),
4970 /* ??? We could look at each element of the union, and find the
4971 largest element. Which would avoid comparing the size of the
4972 initialized element against any tail padding in the union.
4973 Doesn't seem worth the effort... */
4974 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4975 TYPE_SIZE (init_sub_type)) == 1)
4977 /* And now we have to find out if the element itself is fully
4978 constructed. E.g. for union { struct { int a, b; } s; } u
4979 = { .s = { .a = 1 } }. */
4980 if (elt_count == count_type_elements (init_sub_type, false))
4985 *p_must_clear = clear_this;
4988 *p_nz_elts += nz_elts;
4989 *p_elt_count += elt_count;
4994 /* Examine CTOR to discover:
4995 * how many scalar fields are set to nonzero values,
4996 and place it in *P_NZ_ELTS;
4997 * how many scalar fields in total are in CTOR,
4998 and place it in *P_ELT_COUNT.
4999 * if a type is a union, and the initializer from the constructor
5000 is not the largest element in the union, then set *p_must_clear.
5002 Return whether or not CTOR is a valid static constant initializer, the same
5003 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5006 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5007 HOST_WIDE_INT *p_elt_count,
5012 *p_must_clear = false;
5015 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5018 /* Count the number of scalars in TYPE. Return -1 on overflow or
5019 variable-sized. If ALLOW_FLEXARR is true, don't count flexible
5020 array member at the end of the structure. */
5023 count_type_elements (const_tree type, bool allow_flexarr)
5025 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5026 switch (TREE_CODE (type))
5030 tree telts = array_type_nelts (type);
5031 if (telts && host_integerp (telts, 1))
5033 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5034 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5037 else if (max / n > m)
5045 HOST_WIDE_INT n = 0, t;
5048 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5049 if (TREE_CODE (f) == FIELD_DECL)
5051 t = count_type_elements (TREE_TYPE (f), false);
5054 /* Check for structures with flexible array member. */
5055 tree tf = TREE_TYPE (f);
5057 && DECL_CHAIN (f) == NULL
5058 && TREE_CODE (tf) == ARRAY_TYPE
5060 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5061 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5062 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5063 && int_size_in_bytes (type) >= 0)
5075 case QUAL_UNION_TYPE:
5082 return TYPE_VECTOR_SUBPARTS (type);
5086 case FIXED_POINT_TYPE:
5091 case REFERENCE_TYPE:
5106 /* Return 1 if EXP contains mostly (3/4) zeros. */
5109 mostly_zeros_p (const_tree exp)
5111 if (TREE_CODE (exp) == CONSTRUCTOR)
5114 HOST_WIDE_INT nz_elts, count, elts;
5117 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5121 elts = count_type_elements (TREE_TYPE (exp), false);
5123 return nz_elts < elts / 4;
5126 return initializer_zerop (exp);
5129 /* Return 1 if EXP contains all zeros. */
5132 all_zeros_p (const_tree exp)
5134 if (TREE_CODE (exp) == CONSTRUCTOR)
5137 HOST_WIDE_INT nz_elts, count;
5140 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5141 return nz_elts == 0;
5144 return initializer_zerop (exp);
5147 /* Helper function for store_constructor.
5148 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5149 TYPE is the type of the CONSTRUCTOR, not the element type.
5150 CLEARED is as for store_constructor.
5151 ALIAS_SET is the alias set to use for any stores.
5153 This provides a recursive shortcut back to store_constructor when it isn't
5154 necessary to go through store_field. This is so that we can pass through
5155 the cleared field to let store_constructor know that we may not have to
5156 clear a substructure if the outer structure has already been cleared. */
5159 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5160 HOST_WIDE_INT bitpos, enum machine_mode mode,
5161 tree exp, tree type, int cleared,
5162 alias_set_type alias_set)
5164 if (TREE_CODE (exp) == CONSTRUCTOR
5165 /* We can only call store_constructor recursively if the size and
5166 bit position are on a byte boundary. */
5167 && bitpos % BITS_PER_UNIT == 0
5168 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5169 /* If we have a nonzero bitpos for a register target, then we just
5170 let store_field do the bitfield handling. This is unlikely to
5171 generate unnecessary clear instructions anyways. */
5172 && (bitpos == 0 || MEM_P (target)))
5176 = adjust_address (target,
5177 GET_MODE (target) == BLKmode
5179 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5180 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5183 /* Update the alias set, if required. */
5184 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5185 && MEM_ALIAS_SET (target) != 0)
5187 target = copy_rtx (target);
5188 set_mem_alias_set (target, alias_set);
5191 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5194 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5197 /* Store the value of constructor EXP into the rtx TARGET.
5198 TARGET is either a REG or a MEM; we know it cannot conflict, since
5199 safe_from_p has been called.
5200 CLEARED is true if TARGET is known to have been zero'd.
5201 SIZE is the number of bytes of TARGET we are allowed to modify: this
5202 may not be the same as the size of EXP if we are assigning to a field
5203 which has been packed to exclude padding bits. */
5206 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5208 tree type = TREE_TYPE (exp);
5209 #ifdef WORD_REGISTER_OPERATIONS
5210 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5213 switch (TREE_CODE (type))
5217 case QUAL_UNION_TYPE:
5219 unsigned HOST_WIDE_INT idx;
5222 /* If size is zero or the target is already cleared, do nothing. */
5223 if (size == 0 || cleared)
5225 /* We either clear the aggregate or indicate the value is dead. */
5226 else if ((TREE_CODE (type) == UNION_TYPE
5227 || TREE_CODE (type) == QUAL_UNION_TYPE)
5228 && ! CONSTRUCTOR_ELTS (exp))
5229 /* If the constructor is empty, clear the union. */
5231 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5235 /* If we are building a static constructor into a register,
5236 set the initial value as zero so we can fold the value into
5237 a constant. But if more than one register is involved,
5238 this probably loses. */
5239 else if (REG_P (target) && TREE_STATIC (exp)
5240 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5242 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5246 /* If the constructor has fewer fields than the structure or
5247 if we are initializing the structure to mostly zeros, clear
5248 the whole structure first. Don't do this if TARGET is a
5249 register whose mode size isn't equal to SIZE since
5250 clear_storage can't handle this case. */
5252 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5253 != fields_length (type))
5254 || mostly_zeros_p (exp))
5256 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5259 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5263 if (REG_P (target) && !cleared)
5264 emit_clobber (target);
5266 /* Store each element of the constructor into the
5267 corresponding field of TARGET. */
5268 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5270 enum machine_mode mode;
5271 HOST_WIDE_INT bitsize;
5272 HOST_WIDE_INT bitpos = 0;
5274 rtx to_rtx = target;
5276 /* Just ignore missing fields. We cleared the whole
5277 structure, above, if any fields are missing. */
5281 if (cleared && initializer_zerop (value))
5284 if (host_integerp (DECL_SIZE (field), 1))
5285 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5289 mode = DECL_MODE (field);
5290 if (DECL_BIT_FIELD (field))
5293 offset = DECL_FIELD_OFFSET (field);
5294 if (host_integerp (offset, 0)
5295 && host_integerp (bit_position (field), 0))
5297 bitpos = int_bit_position (field);
5301 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5305 enum machine_mode address_mode;
5309 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5310 make_tree (TREE_TYPE (exp),
5313 offset_rtx = expand_normal (offset);
5314 gcc_assert (MEM_P (to_rtx));
5317 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5318 if (GET_MODE (offset_rtx) != address_mode)
5319 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5321 to_rtx = offset_address (to_rtx, offset_rtx,
5322 highest_pow2_factor (offset));
5325 #ifdef WORD_REGISTER_OPERATIONS
5326 /* If this initializes a field that is smaller than a
5327 word, at the start of a word, try to widen it to a full
5328 word. This special case allows us to output C++ member
5329 function initializations in a form that the optimizers
5332 && bitsize < BITS_PER_WORD
5333 && bitpos % BITS_PER_WORD == 0
5334 && GET_MODE_CLASS (mode) == MODE_INT
5335 && TREE_CODE (value) == INTEGER_CST
5337 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5339 tree type = TREE_TYPE (value);
5341 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5343 type = lang_hooks.types.type_for_size
5344 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5345 value = fold_convert (type, value);
5348 if (BYTES_BIG_ENDIAN)
5350 = fold_build2 (LSHIFT_EXPR, type, value,
5351 build_int_cst (type,
5352 BITS_PER_WORD - bitsize));
5353 bitsize = BITS_PER_WORD;
5358 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5359 && DECL_NONADDRESSABLE_P (field))
5361 to_rtx = copy_rtx (to_rtx);
5362 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5365 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5366 value, type, cleared,
5367 get_alias_set (TREE_TYPE (field)));
5374 unsigned HOST_WIDE_INT i;
5377 tree elttype = TREE_TYPE (type);
5379 HOST_WIDE_INT minelt = 0;
5380 HOST_WIDE_INT maxelt = 0;
5382 domain = TYPE_DOMAIN (type);
5383 const_bounds_p = (TYPE_MIN_VALUE (domain)
5384 && TYPE_MAX_VALUE (domain)
5385 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5386 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5388 /* If we have constant bounds for the range of the type, get them. */
5391 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5392 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5395 /* If the constructor has fewer elements than the array, clear
5396 the whole array first. Similarly if this is static
5397 constructor of a non-BLKmode object. */
5400 else if (REG_P (target) && TREE_STATIC (exp))
5404 unsigned HOST_WIDE_INT idx;
5406 HOST_WIDE_INT count = 0, zero_count = 0;
5407 need_to_clear = ! const_bounds_p;
5409 /* This loop is a more accurate version of the loop in
5410 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5411 is also needed to check for missing elements. */
5412 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5414 HOST_WIDE_INT this_node_count;
5419 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5421 tree lo_index = TREE_OPERAND (index, 0);
5422 tree hi_index = TREE_OPERAND (index, 1);
5424 if (! host_integerp (lo_index, 1)
5425 || ! host_integerp (hi_index, 1))
5431 this_node_count = (tree_low_cst (hi_index, 1)
5432 - tree_low_cst (lo_index, 1) + 1);
5435 this_node_count = 1;
5437 count += this_node_count;
5438 if (mostly_zeros_p (value))
5439 zero_count += this_node_count;
5442 /* Clear the entire array first if there are any missing
5443 elements, or if the incidence of zero elements is >=
5446 && (count < maxelt - minelt + 1
5447 || 4 * zero_count >= 3 * count))
5451 if (need_to_clear && size > 0)
5454 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5456 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5460 if (!cleared && REG_P (target))
5461 /* Inform later passes that the old value is dead. */
5462 emit_clobber (target);
5464 /* Store each element of the constructor into the
5465 corresponding element of TARGET, determined by counting the
5467 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5469 enum machine_mode mode;
5470 HOST_WIDE_INT bitsize;
5471 HOST_WIDE_INT bitpos;
5472 rtx xtarget = target;
5474 if (cleared && initializer_zerop (value))
5477 mode = TYPE_MODE (elttype);
5478 if (mode == BLKmode)
5479 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5480 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5483 bitsize = GET_MODE_BITSIZE (mode);
5485 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5487 tree lo_index = TREE_OPERAND (index, 0);
5488 tree hi_index = TREE_OPERAND (index, 1);
5489 rtx index_r, pos_rtx;
5490 HOST_WIDE_INT lo, hi, count;
5493 /* If the range is constant and "small", unroll the loop. */
5495 && host_integerp (lo_index, 0)
5496 && host_integerp (hi_index, 0)
5497 && (lo = tree_low_cst (lo_index, 0),
5498 hi = tree_low_cst (hi_index, 0),
5499 count = hi - lo + 1,
5502 || (host_integerp (TYPE_SIZE (elttype), 1)
5503 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5506 lo -= minelt; hi -= minelt;
5507 for (; lo <= hi; lo++)
5509 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5512 && !MEM_KEEP_ALIAS_SET_P (target)
5513 && TREE_CODE (type) == ARRAY_TYPE
5514 && TYPE_NONALIASED_COMPONENT (type))
5516 target = copy_rtx (target);
5517 MEM_KEEP_ALIAS_SET_P (target) = 1;
5520 store_constructor_field
5521 (target, bitsize, bitpos, mode, value, type, cleared,
5522 get_alias_set (elttype));
5527 rtx loop_start = gen_label_rtx ();
5528 rtx loop_end = gen_label_rtx ();
5531 expand_normal (hi_index);
5533 index = build_decl (EXPR_LOCATION (exp),
5534 VAR_DECL, NULL_TREE, domain);
5535 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5536 SET_DECL_RTL (index, index_r);
5537 store_expr (lo_index, index_r, 0, false);
5539 /* Build the head of the loop. */
5540 do_pending_stack_adjust ();
5541 emit_label (loop_start);
5543 /* Assign value to element index. */
5545 fold_convert (ssizetype,
5546 fold_build2 (MINUS_EXPR,
5549 TYPE_MIN_VALUE (domain)));
5552 size_binop (MULT_EXPR, position,
5553 fold_convert (ssizetype,
5554 TYPE_SIZE_UNIT (elttype)));
5556 pos_rtx = expand_normal (position);
5557 xtarget = offset_address (target, pos_rtx,
5558 highest_pow2_factor (position));
5559 xtarget = adjust_address (xtarget, mode, 0);
5560 if (TREE_CODE (value) == CONSTRUCTOR)
5561 store_constructor (value, xtarget, cleared,
5562 bitsize / BITS_PER_UNIT);
5564 store_expr (value, xtarget, 0, false);
5566 /* Generate a conditional jump to exit the loop. */
5567 exit_cond = build2 (LT_EXPR, integer_type_node,
5569 jumpif (exit_cond, loop_end, -1);
5571 /* Update the loop counter, and jump to the head of
5573 expand_assignment (index,
5574 build2 (PLUS_EXPR, TREE_TYPE (index),
5575 index, integer_one_node),
5578 emit_jump (loop_start);
5580 /* Build the end of the loop. */
5581 emit_label (loop_end);
5584 else if ((index != 0 && ! host_integerp (index, 0))
5585 || ! host_integerp (TYPE_SIZE (elttype), 1))
5590 index = ssize_int (1);
5593 index = fold_convert (ssizetype,
5594 fold_build2 (MINUS_EXPR,
5597 TYPE_MIN_VALUE (domain)));
5600 size_binop (MULT_EXPR, index,
5601 fold_convert (ssizetype,
5602 TYPE_SIZE_UNIT (elttype)));
5603 xtarget = offset_address (target,
5604 expand_normal (position),
5605 highest_pow2_factor (position));
5606 xtarget = adjust_address (xtarget, mode, 0);
5607 store_expr (value, xtarget, 0, false);
5612 bitpos = ((tree_low_cst (index, 0) - minelt)
5613 * tree_low_cst (TYPE_SIZE (elttype), 1));
5615 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5617 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5618 && TREE_CODE (type) == ARRAY_TYPE
5619 && TYPE_NONALIASED_COMPONENT (type))
5621 target = copy_rtx (target);
5622 MEM_KEEP_ALIAS_SET_P (target) = 1;
5624 store_constructor_field (target, bitsize, bitpos, mode, value,
5625 type, cleared, get_alias_set (elttype));
5633 unsigned HOST_WIDE_INT idx;
5634 constructor_elt *ce;
5638 tree elttype = TREE_TYPE (type);
5639 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5640 enum machine_mode eltmode = TYPE_MODE (elttype);
5641 HOST_WIDE_INT bitsize;
5642 HOST_WIDE_INT bitpos;
5643 rtvec vector = NULL;
5645 alias_set_type alias;
5647 gcc_assert (eltmode != BLKmode);
5649 n_elts = TYPE_VECTOR_SUBPARTS (type);
5650 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5652 enum machine_mode mode = GET_MODE (target);
5654 icode = (int) optab_handler (vec_init_optab, mode);
5655 if (icode != CODE_FOR_nothing)
5659 vector = rtvec_alloc (n_elts);
5660 for (i = 0; i < n_elts; i++)
5661 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5665 /* If the constructor has fewer elements than the vector,
5666 clear the whole array first. Similarly if this is static
5667 constructor of a non-BLKmode object. */
5670 else if (REG_P (target) && TREE_STATIC (exp))
5674 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5677 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5679 int n_elts_here = tree_low_cst
5680 (int_const_binop (TRUNC_DIV_EXPR,
5681 TYPE_SIZE (TREE_TYPE (value)),
5682 TYPE_SIZE (elttype)), 1);
5684 count += n_elts_here;
5685 if (mostly_zeros_p (value))
5686 zero_count += n_elts_here;
5689 /* Clear the entire vector first if there are any missing elements,
5690 or if the incidence of zero elements is >= 75%. */
5691 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5694 if (need_to_clear && size > 0 && !vector)
5697 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5699 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5703 /* Inform later passes that the old value is dead. */
5704 if (!cleared && !vector && REG_P (target))
5705 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5708 alias = MEM_ALIAS_SET (target);
5710 alias = get_alias_set (elttype);
5712 /* Store each element of the constructor into the corresponding
5713 element of TARGET, determined by counting the elements. */
5714 for (idx = 0, i = 0;
5715 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5716 idx++, i += bitsize / elt_size)
5718 HOST_WIDE_INT eltpos;
5719 tree value = ce->value;
5721 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5722 if (cleared && initializer_zerop (value))
5726 eltpos = tree_low_cst (ce->index, 1);
5732 /* Vector CONSTRUCTORs should only be built from smaller
5733 vectors in the case of BLKmode vectors. */
5734 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5735 RTVEC_ELT (vector, eltpos)
5736 = expand_normal (value);
5740 enum machine_mode value_mode =
5741 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5742 ? TYPE_MODE (TREE_TYPE (value))
5744 bitpos = eltpos * elt_size;
5745 store_constructor_field (target, bitsize, bitpos,
5746 value_mode, value, type,
5752 emit_insn (GEN_FCN (icode)
5754 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5763 /* Store the value of EXP (an expression tree)
5764 into a subfield of TARGET which has mode MODE and occupies
5765 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5766 If MODE is VOIDmode, it means that we are storing into a bit-field.
5768 Always return const0_rtx unless we have something particular to
5771 TYPE is the type of the underlying object,
5773 ALIAS_SET is the alias set for the destination. This value will
5774 (in general) be different from that for TARGET, since TARGET is a
5775 reference to the containing structure.
5777 If NONTEMPORAL is true, try generating a nontemporal store. */
5780 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5781 enum machine_mode mode, tree exp, tree type,
5782 alias_set_type alias_set, bool nontemporal)
5784 if (TREE_CODE (exp) == ERROR_MARK)
5787 /* If we have nothing to store, do nothing unless the expression has
5790 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5792 /* If we are storing into an unaligned field of an aligned union that is
5793 in a register, we may have the mode of TARGET being an integer mode but
5794 MODE == BLKmode. In that case, get an aligned object whose size and
5795 alignment are the same as TARGET and store TARGET into it (we can avoid
5796 the store if the field being stored is the entire width of TARGET). Then
5797 call ourselves recursively to store the field into a BLKmode version of
5798 that object. Finally, load from the object into TARGET. This is not
5799 very efficient in general, but should only be slightly more expensive
5800 than the otherwise-required unaligned accesses. Perhaps this can be
5801 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5802 twice, once with emit_move_insn and once via store_field. */
5805 && (REG_P (target) || GET_CODE (target) == SUBREG))
5807 rtx object = assign_temp (type, 0, 1, 1);
5808 rtx blk_object = adjust_address (object, BLKmode, 0);
5810 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5811 emit_move_insn (object, target);
5813 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5816 emit_move_insn (target, object);
5818 /* We want to return the BLKmode version of the data. */
5822 if (GET_CODE (target) == CONCAT)
5824 /* We're storing into a struct containing a single __complex. */
5826 gcc_assert (!bitpos);
5827 return store_expr (exp, target, 0, nontemporal);
5830 /* If the structure is in a register or if the component
5831 is a bit field, we cannot use addressing to access it.
5832 Use bit-field techniques or SUBREG to store in it. */
5834 if (mode == VOIDmode
5835 || (mode != BLKmode && ! direct_store[(int) mode]
5836 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5837 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5839 || GET_CODE (target) == SUBREG
5840 /* If the field isn't aligned enough to store as an ordinary memref,
5841 store it as a bit field. */
5843 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5844 || bitpos % GET_MODE_ALIGNMENT (mode))
5845 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5846 || (bitpos % BITS_PER_UNIT != 0)))
5847 /* If the RHS and field are a constant size and the size of the
5848 RHS isn't the same size as the bitfield, we must use bitfield
5851 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5852 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5853 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5854 decl we must use bitfield operations. */
5856 && TREE_CODE (exp) == MEM_REF
5857 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5858 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5859 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
5860 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5865 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5866 implies a mask operation. If the precision is the same size as
5867 the field we're storing into, that mask is redundant. This is
5868 particularly common with bit field assignments generated by the
5870 nop_def = get_def_for_expr (exp, NOP_EXPR);
5873 tree type = TREE_TYPE (exp);
5874 if (INTEGRAL_TYPE_P (type)
5875 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5876 && bitsize == TYPE_PRECISION (type))
5878 tree op = gimple_assign_rhs1 (nop_def);
5879 type = TREE_TYPE (op);
5880 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5885 temp = expand_normal (exp);
5887 /* If BITSIZE is narrower than the size of the type of EXP
5888 we will be narrowing TEMP. Normally, what's wanted are the
5889 low-order bits. However, if EXP's type is a record and this is
5890 big-endian machine, we want the upper BITSIZE bits. */
5891 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5892 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5893 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5894 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5895 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
5898 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5900 if (mode != VOIDmode && mode != BLKmode
5901 && mode != TYPE_MODE (TREE_TYPE (exp)))
5902 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5904 /* If the modes of TEMP and TARGET are both BLKmode, both
5905 must be in memory and BITPOS must be aligned on a byte
5906 boundary. If so, we simply do a block copy. Likewise
5907 for a BLKmode-like TARGET. */
5908 if (GET_MODE (temp) == BLKmode
5909 && (GET_MODE (target) == BLKmode
5911 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5912 && (bitpos % BITS_PER_UNIT) == 0
5913 && (bitsize % BITS_PER_UNIT) == 0)))
5915 gcc_assert (MEM_P (target) && MEM_P (temp)
5916 && (bitpos % BITS_PER_UNIT) == 0);
5918 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5919 emit_block_move (target, temp,
5920 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5927 /* Store the value in the bitfield. */
5928 store_bit_field (target, bitsize, bitpos, mode, temp);
5934 /* Now build a reference to just the desired component. */
5935 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5937 if (to_rtx == target)
5938 to_rtx = copy_rtx (to_rtx);
5940 if (!MEM_SCALAR_P (to_rtx))
5941 MEM_IN_STRUCT_P (to_rtx) = 1;
5942 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5943 set_mem_alias_set (to_rtx, alias_set);
5945 return store_expr (exp, to_rtx, 0, nontemporal);
5949 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5950 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5951 codes and find the ultimate containing object, which we return.
5953 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5954 bit position, and *PUNSIGNEDP to the signedness of the field.
5955 If the position of the field is variable, we store a tree
5956 giving the variable offset (in units) in *POFFSET.
5957 This offset is in addition to the bit position.
5958 If the position is not variable, we store 0 in *POFFSET.
5960 If any of the extraction expressions is volatile,
5961 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5963 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5964 Otherwise, it is a mode that can be used to access the field.
5966 If the field describes a variable-sized object, *PMODE is set to
5967 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5968 this case, but the address of the object can be found.
5970 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5971 look through nodes that serve as markers of a greater alignment than
5972 the one that can be deduced from the expression. These nodes make it
5973 possible for front-ends to prevent temporaries from being created by
5974 the middle-end on alignment considerations. For that purpose, the
5975 normal operating mode at high-level is to always pass FALSE so that
5976 the ultimate containing object is really returned; moreover, the
5977 associated predicate handled_component_p will always return TRUE
5978 on these nodes, thus indicating that they are essentially handled
5979 by get_inner_reference. TRUE should only be passed when the caller
5980 is scanning the expression in order to build another representation
5981 and specifically knows how to handle these nodes; as such, this is
5982 the normal operating mode in the RTL expanders. */
5985 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5986 HOST_WIDE_INT *pbitpos, tree *poffset,
5987 enum machine_mode *pmode, int *punsignedp,
5988 int *pvolatilep, bool keep_aligning)
5991 enum machine_mode mode = VOIDmode;
5992 bool blkmode_bitfield = false;
5993 tree offset = size_zero_node;
5994 double_int bit_offset = double_int_zero;
5996 /* First get the mode, signedness, and size. We do this from just the
5997 outermost expression. */
5999 if (TREE_CODE (exp) == COMPONENT_REF)
6001 tree field = TREE_OPERAND (exp, 1);
6002 size_tree = DECL_SIZE (field);
6003 if (!DECL_BIT_FIELD (field))
6004 mode = DECL_MODE (field);
6005 else if (DECL_MODE (field) == BLKmode)
6006 blkmode_bitfield = true;
6007 else if (TREE_THIS_VOLATILE (exp)
6008 && flag_strict_volatile_bitfields > 0)
6009 /* Volatile bitfields should be accessed in the mode of the
6010 field's type, not the mode computed based on the bit
6012 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6014 *punsignedp = DECL_UNSIGNED (field);
6016 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6018 size_tree = TREE_OPERAND (exp, 1);
6019 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6020 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6022 /* For vector types, with the correct size of access, use the mode of
6024 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6025 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6026 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6027 mode = TYPE_MODE (TREE_TYPE (exp));
6031 mode = TYPE_MODE (TREE_TYPE (exp));
6032 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6034 if (mode == BLKmode)
6035 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6037 *pbitsize = GET_MODE_BITSIZE (mode);
6042 if (! host_integerp (size_tree, 1))
6043 mode = BLKmode, *pbitsize = -1;
6045 *pbitsize = tree_low_cst (size_tree, 1);
6048 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6049 and find the ultimate containing object. */
6052 switch (TREE_CODE (exp))
6056 = double_int_add (bit_offset,
6057 tree_to_double_int (TREE_OPERAND (exp, 2)));
6062 tree field = TREE_OPERAND (exp, 1);
6063 tree this_offset = component_ref_field_offset (exp);
6065 /* If this field hasn't been filled in yet, don't go past it.
6066 This should only happen when folding expressions made during
6067 type construction. */
6068 if (this_offset == 0)
6071 offset = size_binop (PLUS_EXPR, offset, this_offset);
6072 bit_offset = double_int_add (bit_offset,
6074 (DECL_FIELD_BIT_OFFSET (field)));
6076 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6081 case ARRAY_RANGE_REF:
6083 tree index = TREE_OPERAND (exp, 1);
6084 tree low_bound = array_ref_low_bound (exp);
6085 tree unit_size = array_ref_element_size (exp);
6087 /* We assume all arrays have sizes that are a multiple of a byte.
6088 First subtract the lower bound, if any, in the type of the
6089 index, then convert to sizetype and multiply by the size of
6090 the array element. */
6091 if (! integer_zerop (low_bound))
6092 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6095 offset = size_binop (PLUS_EXPR, offset,
6096 size_binop (MULT_EXPR,
6097 fold_convert (sizetype, index),
6106 bit_offset = double_int_add (bit_offset,
6107 uhwi_to_double_int (*pbitsize));
6110 case VIEW_CONVERT_EXPR:
6111 if (keep_aligning && STRICT_ALIGNMENT
6112 && (TYPE_ALIGN (TREE_TYPE (exp))
6113 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6114 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6115 < BIGGEST_ALIGNMENT)
6116 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6117 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6122 /* Hand back the decl for MEM[&decl, off]. */
6123 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6125 tree off = TREE_OPERAND (exp, 1);
6126 if (!integer_zerop (off))
6128 double_int boff, coff = mem_ref_offset (exp);
6129 boff = double_int_lshift (coff,
6131 ? 3 : exact_log2 (BITS_PER_UNIT),
6132 HOST_BITS_PER_DOUBLE_INT, true);
6133 bit_offset = double_int_add (bit_offset, boff);
6135 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6143 /* If any reference in the chain is volatile, the effect is volatile. */
6144 if (TREE_THIS_VOLATILE (exp))
6147 exp = TREE_OPERAND (exp, 0);
6151 /* If OFFSET is constant, see if we can return the whole thing as a
6152 constant bit position. Make sure to handle overflow during
6154 if (host_integerp (offset, 0))
6156 double_int tem = double_int_lshift (tree_to_double_int (offset),
6158 ? 3 : exact_log2 (BITS_PER_UNIT),
6159 HOST_BITS_PER_DOUBLE_INT, true);
6160 tem = double_int_add (tem, bit_offset);
6161 if (double_int_fits_in_shwi_p (tem))
6163 *pbitpos = double_int_to_shwi (tem);
6164 *poffset = offset = NULL_TREE;
6168 /* Otherwise, split it up. */
6171 *pbitpos = double_int_to_shwi (bit_offset);
6175 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6176 if (mode == VOIDmode
6178 && (*pbitpos % BITS_PER_UNIT) == 0
6179 && (*pbitsize % BITS_PER_UNIT) == 0)
6187 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6188 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6189 EXP is marked as PACKED. */
6192 contains_packed_reference (const_tree exp)
6194 bool packed_p = false;
6198 switch (TREE_CODE (exp))
6202 tree field = TREE_OPERAND (exp, 1);
6203 packed_p = DECL_PACKED (field)
6204 || TYPE_PACKED (TREE_TYPE (field))
6205 || TYPE_PACKED (TREE_TYPE (exp));
6213 case ARRAY_RANGE_REF:
6216 case VIEW_CONVERT_EXPR:
6222 exp = TREE_OPERAND (exp, 0);
6228 /* Return a tree of sizetype representing the size, in bytes, of the element
6229 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6232 array_ref_element_size (tree exp)
6234 tree aligned_size = TREE_OPERAND (exp, 3);
6235 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6236 location_t loc = EXPR_LOCATION (exp);
6238 /* If a size was specified in the ARRAY_REF, it's the size measured
6239 in alignment units of the element type. So multiply by that value. */
6242 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6243 sizetype from another type of the same width and signedness. */
6244 if (TREE_TYPE (aligned_size) != sizetype)
6245 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6246 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6247 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6250 /* Otherwise, take the size from that of the element type. Substitute
6251 any PLACEHOLDER_EXPR that we have. */
6253 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6256 /* Return a tree representing the lower bound of the array mentioned in
6257 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6260 array_ref_low_bound (tree exp)
6262 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6264 /* If a lower bound is specified in EXP, use it. */
6265 if (TREE_OPERAND (exp, 2))
6266 return TREE_OPERAND (exp, 2);
6268 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6269 substituting for a PLACEHOLDER_EXPR as needed. */
6270 if (domain_type && TYPE_MIN_VALUE (domain_type))
6271 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6273 /* Otherwise, return a zero of the appropriate type. */
6274 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6277 /* Return a tree representing the upper bound of the array mentioned in
6278 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6281 array_ref_up_bound (tree exp)
6283 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6285 /* If there is a domain type and it has an upper bound, use it, substituting
6286 for a PLACEHOLDER_EXPR as needed. */
6287 if (domain_type && TYPE_MAX_VALUE (domain_type))
6288 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6290 /* Otherwise fail. */
6294 /* Return a tree representing the offset, in bytes, of the field referenced
6295 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6298 component_ref_field_offset (tree exp)
6300 tree aligned_offset = TREE_OPERAND (exp, 2);
6301 tree field = TREE_OPERAND (exp, 1);
6302 location_t loc = EXPR_LOCATION (exp);
6304 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6305 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6309 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6310 sizetype from another type of the same width and signedness. */
6311 if (TREE_TYPE (aligned_offset) != sizetype)
6312 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6313 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6314 size_int (DECL_OFFSET_ALIGN (field)
6318 /* Otherwise, take the offset from that of the field. Substitute
6319 any PLACEHOLDER_EXPR that we have. */
6321 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6324 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6326 static unsigned HOST_WIDE_INT
6327 target_align (const_tree target)
6329 /* We might have a chain of nested references with intermediate misaligning
6330 bitfields components, so need to recurse to find out. */
6332 unsigned HOST_WIDE_INT this_align, outer_align;
6334 switch (TREE_CODE (target))
6340 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6341 outer_align = target_align (TREE_OPERAND (target, 0));
6342 return MIN (this_align, outer_align);
6345 case ARRAY_RANGE_REF:
6346 this_align = TYPE_ALIGN (TREE_TYPE (target));
6347 outer_align = target_align (TREE_OPERAND (target, 0));
6348 return MIN (this_align, outer_align);
6351 case NON_LVALUE_EXPR:
6352 case VIEW_CONVERT_EXPR:
6353 this_align = TYPE_ALIGN (TREE_TYPE (target));
6354 outer_align = target_align (TREE_OPERAND (target, 0));
6355 return MAX (this_align, outer_align);
6358 return TYPE_ALIGN (TREE_TYPE (target));
6363 /* Given an rtx VALUE that may contain additions and multiplications, return
6364 an equivalent value that just refers to a register, memory, or constant.
6365 This is done by generating instructions to perform the arithmetic and
6366 returning a pseudo-register containing the value.
6368 The returned value may be a REG, SUBREG, MEM or constant. */
6371 force_operand (rtx value, rtx target)
6374 /* Use subtarget as the target for operand 0 of a binary operation. */
6375 rtx subtarget = get_subtarget (target);
6376 enum rtx_code code = GET_CODE (value);
6378 /* Check for subreg applied to an expression produced by loop optimizer. */
6380 && !REG_P (SUBREG_REG (value))
6381 && !MEM_P (SUBREG_REG (value)))
6384 = simplify_gen_subreg (GET_MODE (value),
6385 force_reg (GET_MODE (SUBREG_REG (value)),
6386 force_operand (SUBREG_REG (value),
6388 GET_MODE (SUBREG_REG (value)),
6389 SUBREG_BYTE (value));
6390 code = GET_CODE (value);
6393 /* Check for a PIC address load. */
6394 if ((code == PLUS || code == MINUS)
6395 && XEXP (value, 0) == pic_offset_table_rtx
6396 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6397 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6398 || GET_CODE (XEXP (value, 1)) == CONST))
6401 subtarget = gen_reg_rtx (GET_MODE (value));
6402 emit_move_insn (subtarget, value);
6406 if (ARITHMETIC_P (value))
6408 op2 = XEXP (value, 1);
6409 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6411 if (code == MINUS && CONST_INT_P (op2))
6414 op2 = negate_rtx (GET_MODE (value), op2);
6417 /* Check for an addition with OP2 a constant integer and our first
6418 operand a PLUS of a virtual register and something else. In that
6419 case, we want to emit the sum of the virtual register and the
6420 constant first and then add the other value. This allows virtual
6421 register instantiation to simply modify the constant rather than
6422 creating another one around this addition. */
6423 if (code == PLUS && CONST_INT_P (op2)
6424 && GET_CODE (XEXP (value, 0)) == PLUS
6425 && REG_P (XEXP (XEXP (value, 0), 0))
6426 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6427 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6429 rtx temp = expand_simple_binop (GET_MODE (value), code,
6430 XEXP (XEXP (value, 0), 0), op2,
6431 subtarget, 0, OPTAB_LIB_WIDEN);
6432 return expand_simple_binop (GET_MODE (value), code, temp,
6433 force_operand (XEXP (XEXP (value,
6435 target, 0, OPTAB_LIB_WIDEN);
6438 op1 = force_operand (XEXP (value, 0), subtarget);
6439 op2 = force_operand (op2, NULL_RTX);
6443 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6445 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6446 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6447 target, 1, OPTAB_LIB_WIDEN);
6449 return expand_divmod (0,
6450 FLOAT_MODE_P (GET_MODE (value))
6451 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6452 GET_MODE (value), op1, op2, target, 0);
6454 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6457 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6460 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6463 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6464 target, 0, OPTAB_LIB_WIDEN);
6466 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6467 target, 1, OPTAB_LIB_WIDEN);
6470 if (UNARY_P (value))
6473 target = gen_reg_rtx (GET_MODE (value));
6474 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6481 case FLOAT_TRUNCATE:
6482 convert_move (target, op1, code == ZERO_EXTEND);
6487 expand_fix (target, op1, code == UNSIGNED_FIX);
6491 case UNSIGNED_FLOAT:
6492 expand_float (target, op1, code == UNSIGNED_FLOAT);
6496 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6500 #ifdef INSN_SCHEDULING
6501 /* On machines that have insn scheduling, we want all memory reference to be
6502 explicit, so we need to deal with such paradoxical SUBREGs. */
6503 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6504 && (GET_MODE_SIZE (GET_MODE (value))
6505 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6507 = simplify_gen_subreg (GET_MODE (value),
6508 force_reg (GET_MODE (SUBREG_REG (value)),
6509 force_operand (SUBREG_REG (value),
6511 GET_MODE (SUBREG_REG (value)),
6512 SUBREG_BYTE (value));
6518 /* Subroutine of expand_expr: return nonzero iff there is no way that
6519 EXP can reference X, which is being modified. TOP_P is nonzero if this
6520 call is going to be used to determine whether we need a temporary
6521 for EXP, as opposed to a recursive call to this function.
6523 It is always safe for this routine to return zero since it merely
6524 searches for optimization opportunities. */
6527 safe_from_p (const_rtx x, tree exp, int top_p)
6533 /* If EXP has varying size, we MUST use a target since we currently
6534 have no way of allocating temporaries of variable size
6535 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6536 So we assume here that something at a higher level has prevented a
6537 clash. This is somewhat bogus, but the best we can do. Only
6538 do this when X is BLKmode and when we are at the top level. */
6539 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6540 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6541 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6542 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6543 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6545 && GET_MODE (x) == BLKmode)
6546 /* If X is in the outgoing argument area, it is always safe. */
6548 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6549 || (GET_CODE (XEXP (x, 0)) == PLUS
6550 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6553 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6554 find the underlying pseudo. */
6555 if (GET_CODE (x) == SUBREG)
6558 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6562 /* Now look at our tree code and possibly recurse. */
6563 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6565 case tcc_declaration:
6566 exp_rtl = DECL_RTL_IF_SET (exp);
6572 case tcc_exceptional:
6573 if (TREE_CODE (exp) == TREE_LIST)
6577 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6579 exp = TREE_CHAIN (exp);
6582 if (TREE_CODE (exp) != TREE_LIST)
6583 return safe_from_p (x, exp, 0);
6586 else if (TREE_CODE (exp) == CONSTRUCTOR)
6588 constructor_elt *ce;
6589 unsigned HOST_WIDE_INT idx;
6591 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
6592 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6593 || !safe_from_p (x, ce->value, 0))
6597 else if (TREE_CODE (exp) == ERROR_MARK)
6598 return 1; /* An already-visited SAVE_EXPR? */
6603 /* The only case we look at here is the DECL_INITIAL inside a
6605 return (TREE_CODE (exp) != DECL_EXPR
6606 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6607 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6608 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6611 case tcc_comparison:
6612 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6617 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6619 case tcc_expression:
6622 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6623 the expression. If it is set, we conflict iff we are that rtx or
6624 both are in memory. Otherwise, we check all operands of the
6625 expression recursively. */
6627 switch (TREE_CODE (exp))
6630 /* If the operand is static or we are static, we can't conflict.
6631 Likewise if we don't conflict with the operand at all. */
6632 if (staticp (TREE_OPERAND (exp, 0))
6633 || TREE_STATIC (exp)
6634 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6637 /* Otherwise, the only way this can conflict is if we are taking
6638 the address of a DECL a that address if part of X, which is
6640 exp = TREE_OPERAND (exp, 0);
6643 if (!DECL_RTL_SET_P (exp)
6644 || !MEM_P (DECL_RTL (exp)))
6647 exp_rtl = XEXP (DECL_RTL (exp), 0);
6653 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6654 get_alias_set (exp)))
6659 /* Assume that the call will clobber all hard registers and
6661 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6666 case WITH_CLEANUP_EXPR:
6667 case CLEANUP_POINT_EXPR:
6668 /* Lowered by gimplify.c. */
6672 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6678 /* If we have an rtx, we do not need to scan our operands. */
6682 nops = TREE_OPERAND_LENGTH (exp);
6683 for (i = 0; i < nops; i++)
6684 if (TREE_OPERAND (exp, i) != 0
6685 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6691 /* Should never get a type here. */
6695 /* If we have an rtl, find any enclosed object. Then see if we conflict
6699 if (GET_CODE (exp_rtl) == SUBREG)
6701 exp_rtl = SUBREG_REG (exp_rtl);
6703 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6707 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6708 are memory and they conflict. */
6709 return ! (rtx_equal_p (x, exp_rtl)
6710 || (MEM_P (x) && MEM_P (exp_rtl)
6711 && true_dependence (exp_rtl, VOIDmode, x,
6712 rtx_addr_varies_p)));
6715 /* If we reach here, it is safe. */
6720 /* Return the highest power of two that EXP is known to be a multiple of.
6721 This is used in updating alignment of MEMs in array references. */
6723 unsigned HOST_WIDE_INT
6724 highest_pow2_factor (const_tree exp)
6726 unsigned HOST_WIDE_INT c0, c1;
6728 switch (TREE_CODE (exp))
6731 /* We can find the lowest bit that's a one. If the low
6732 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6733 We need to handle this case since we can find it in a COND_EXPR,
6734 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6735 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6737 if (TREE_OVERFLOW (exp))
6738 return BIGGEST_ALIGNMENT;
6741 /* Note: tree_low_cst is intentionally not used here,
6742 we don't care about the upper bits. */
6743 c0 = TREE_INT_CST_LOW (exp);
6745 return c0 ? c0 : BIGGEST_ALIGNMENT;
6749 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6750 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6751 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6752 return MIN (c0, c1);
6755 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6756 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6759 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6761 if (integer_pow2p (TREE_OPERAND (exp, 1))
6762 && host_integerp (TREE_OPERAND (exp, 1), 1))
6764 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6765 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6766 return MAX (1, c0 / c1);
6771 /* The highest power of two of a bit-and expression is the maximum of
6772 that of its operands. We typically get here for a complex LHS and
6773 a constant negative power of two on the RHS to force an explicit
6774 alignment, so don't bother looking at the LHS. */
6775 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6779 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6782 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6785 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6786 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6787 return MIN (c0, c1);
6796 /* Similar, except that the alignment requirements of TARGET are
6797 taken into account. Assume it is at least as aligned as its
6798 type, unless it is a COMPONENT_REF in which case the layout of
6799 the structure gives the alignment. */
6801 static unsigned HOST_WIDE_INT
6802 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6804 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6805 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6807 return MAX (factor, talign);
6810 /* Subroutine of expand_expr. Expand the two operands of a binary
6811 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6812 The value may be stored in TARGET if TARGET is nonzero. The
6813 MODIFIER argument is as documented by expand_expr. */
6816 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6817 enum expand_modifier modifier)
6819 if (! safe_from_p (target, exp1, 1))
6821 if (operand_equal_p (exp0, exp1, 0))
6823 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6824 *op1 = copy_rtx (*op0);
6828 /* If we need to preserve evaluation order, copy exp0 into its own
6829 temporary variable so that it can't be clobbered by exp1. */
6830 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6831 exp0 = save_expr (exp0);
6832 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6833 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6838 /* Return a MEM that contains constant EXP. DEFER is as for
6839 output_constant_def and MODIFIER is as for expand_expr. */
6842 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6846 mem = output_constant_def (exp, defer);
6847 if (modifier != EXPAND_INITIALIZER)
6848 mem = use_anchored_address (mem);
6852 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6853 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6856 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6857 enum expand_modifier modifier, addr_space_t as)
6859 rtx result, subtarget;
6861 HOST_WIDE_INT bitsize, bitpos;
6862 int volatilep, unsignedp;
6863 enum machine_mode mode1;
6865 /* If we are taking the address of a constant and are at the top level,
6866 we have to use output_constant_def since we can't call force_const_mem
6868 /* ??? This should be considered a front-end bug. We should not be
6869 generating ADDR_EXPR of something that isn't an LVALUE. The only
6870 exception here is STRING_CST. */
6871 if (CONSTANT_CLASS_P (exp))
6872 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6874 /* Everything must be something allowed by is_gimple_addressable. */
6875 switch (TREE_CODE (exp))
6878 /* This case will happen via recursion for &a->b. */
6879 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6883 tree tem = TREE_OPERAND (exp, 0);
6884 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6885 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6887 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6888 return expand_expr (tem, target, tmode, modifier);
6892 /* Expand the initializer like constants above. */
6893 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6896 /* The real part of the complex number is always first, therefore
6897 the address is the same as the address of the parent object. */
6900 inner = TREE_OPERAND (exp, 0);
6904 /* The imaginary part of the complex number is always second.
6905 The expression is therefore always offset by the size of the
6908 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6909 inner = TREE_OPERAND (exp, 0);
6913 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6914 expand_expr, as that can have various side effects; LABEL_DECLs for
6915 example, may not have their DECL_RTL set yet. Expand the rtl of
6916 CONSTRUCTORs too, which should yield a memory reference for the
6917 constructor's contents. Assume language specific tree nodes can
6918 be expanded in some interesting way. */
6919 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6921 || TREE_CODE (exp) == CONSTRUCTOR
6922 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6924 result = expand_expr (exp, target, tmode,
6925 modifier == EXPAND_INITIALIZER
6926 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6928 /* If the DECL isn't in memory, then the DECL wasn't properly
6929 marked TREE_ADDRESSABLE, which will be either a front-end
6930 or a tree optimizer bug. */
6931 gcc_assert (MEM_P (result));
6932 result = XEXP (result, 0);
6934 /* ??? Is this needed anymore? */
6935 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6937 assemble_external (exp);
6938 TREE_USED (exp) = 1;
6941 if (modifier != EXPAND_INITIALIZER
6942 && modifier != EXPAND_CONST_ADDRESS)
6943 result = force_operand (result, target);
6947 /* Pass FALSE as the last argument to get_inner_reference although
6948 we are expanding to RTL. The rationale is that we know how to
6949 handle "aligning nodes" here: we can just bypass them because
6950 they won't change the final object whose address will be returned
6951 (they actually exist only for that purpose). */
6952 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6953 &mode1, &unsignedp, &volatilep, false);
6957 /* We must have made progress. */
6958 gcc_assert (inner != exp);
6960 subtarget = offset || bitpos ? NULL_RTX : target;
6961 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6962 inner alignment, force the inner to be sufficiently aligned. */
6963 if (CONSTANT_CLASS_P (inner)
6964 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6966 inner = copy_node (inner);
6967 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6968 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6969 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6971 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6977 if (modifier != EXPAND_NORMAL)
6978 result = force_operand (result, NULL);
6979 tmp = expand_expr (offset, NULL_RTX, tmode,
6980 modifier == EXPAND_INITIALIZER
6981 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6983 result = convert_memory_address_addr_space (tmode, result, as);
6984 tmp = convert_memory_address_addr_space (tmode, tmp, as);
6986 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6987 result = simplify_gen_binary (PLUS, tmode, result, tmp);
6990 subtarget = bitpos ? NULL_RTX : target;
6991 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6992 1, OPTAB_LIB_WIDEN);
6998 /* Someone beforehand should have rejected taking the address
6999 of such an object. */
7000 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7002 result = plus_constant (result, bitpos / BITS_PER_UNIT);
7003 if (modifier < EXPAND_SUM)
7004 result = force_operand (result, target);
7010 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7011 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7014 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7015 enum expand_modifier modifier)
7017 addr_space_t as = ADDR_SPACE_GENERIC;
7018 enum machine_mode address_mode = Pmode;
7019 enum machine_mode pointer_mode = ptr_mode;
7020 enum machine_mode rmode;
7023 /* Target mode of VOIDmode says "whatever's natural". */
7024 if (tmode == VOIDmode)
7025 tmode = TYPE_MODE (TREE_TYPE (exp));
7027 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7029 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7030 address_mode = targetm.addr_space.address_mode (as);
7031 pointer_mode = targetm.addr_space.pointer_mode (as);
7034 /* We can get called with some Weird Things if the user does silliness
7035 like "(short) &a". In that case, convert_memory_address won't do
7036 the right thing, so ignore the given target mode. */
7037 if (tmode != address_mode && tmode != pointer_mode)
7038 tmode = address_mode;
7040 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7041 tmode, modifier, as);
7043 /* Despite expand_expr claims concerning ignoring TMODE when not
7044 strictly convenient, stuff breaks if we don't honor it. Note
7045 that combined with the above, we only do this for pointer modes. */
7046 rmode = GET_MODE (result);
7047 if (rmode == VOIDmode)
7050 result = convert_memory_address_addr_space (tmode, result, as);
7055 /* Generate code for computing CONSTRUCTOR EXP.
7056 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7057 is TRUE, instead of creating a temporary variable in memory
7058 NULL is returned and the caller needs to handle it differently. */
7061 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7062 bool avoid_temp_mem)
7064 tree type = TREE_TYPE (exp);
7065 enum machine_mode mode = TYPE_MODE (type);
7067 /* Try to avoid creating a temporary at all. This is possible
7068 if all of the initializer is zero.
7069 FIXME: try to handle all [0..255] initializers we can handle
7071 if (TREE_STATIC (exp)
7072 && !TREE_ADDRESSABLE (exp)
7073 && target != 0 && mode == BLKmode
7074 && all_zeros_p (exp))
7076 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7080 /* All elts simple constants => refer to a constant in memory. But
7081 if this is a non-BLKmode mode, let it store a field at a time
7082 since that should make a CONST_INT or CONST_DOUBLE when we
7083 fold. Likewise, if we have a target we can use, it is best to
7084 store directly into the target unless the type is large enough
7085 that memcpy will be used. If we are making an initializer and
7086 all operands are constant, put it in memory as well.
7088 FIXME: Avoid trying to fill vector constructors piece-meal.
7089 Output them with output_constant_def below unless we're sure
7090 they're zeros. This should go away when vector initializers
7091 are treated like VECTOR_CST instead of arrays. */
7092 if ((TREE_STATIC (exp)
7093 && ((mode == BLKmode
7094 && ! (target != 0 && safe_from_p (target, exp, 1)))
7095 || TREE_ADDRESSABLE (exp)
7096 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7097 && (! MOVE_BY_PIECES_P
7098 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7100 && ! mostly_zeros_p (exp))))
7101 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7102 && TREE_CONSTANT (exp)))
7109 constructor = expand_expr_constant (exp, 1, modifier);
7111 if (modifier != EXPAND_CONST_ADDRESS
7112 && modifier != EXPAND_INITIALIZER
7113 && modifier != EXPAND_SUM)
7114 constructor = validize_mem (constructor);
7119 /* Handle calls that pass values in multiple non-contiguous
7120 locations. The Irix 6 ABI has examples of this. */
7121 if (target == 0 || ! safe_from_p (target, exp, 1)
7122 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7128 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7129 | (TREE_READONLY (exp)
7130 * TYPE_QUAL_CONST))),
7131 0, TREE_ADDRESSABLE (exp), 1);
7134 store_constructor (exp, target, 0, int_expr_size (exp));
7139 /* expand_expr: generate code for computing expression EXP.
7140 An rtx for the computed value is returned. The value is never null.
7141 In the case of a void EXP, const0_rtx is returned.
7143 The value may be stored in TARGET if TARGET is nonzero.
7144 TARGET is just a suggestion; callers must assume that
7145 the rtx returned may not be the same as TARGET.
7147 If TARGET is CONST0_RTX, it means that the value will be ignored.
7149 If TMODE is not VOIDmode, it suggests generating the
7150 result in mode TMODE. But this is done only when convenient.
7151 Otherwise, TMODE is ignored and the value generated in its natural mode.
7152 TMODE is just a suggestion; callers must assume that
7153 the rtx returned may not have mode TMODE.
7155 Note that TARGET may have neither TMODE nor MODE. In that case, it
7156 probably will not be used.
7158 If MODIFIER is EXPAND_SUM then when EXP is an addition
7159 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7160 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7161 products as above, or REG or MEM, or constant.
7162 Ordinarily in such cases we would output mul or add instructions
7163 and then return a pseudo reg containing the sum.
7165 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7166 it also marks a label as absolutely required (it can't be dead).
7167 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7168 This is used for outputting expressions used in initializers.
7170 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7171 with a constant address even if that address is not normally legitimate.
7172 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7174 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7175 a call parameter. Such targets require special care as we haven't yet
7176 marked TARGET so that it's safe from being trashed by libcalls. We
7177 don't want to use TARGET for anything but the final result;
7178 Intermediate values must go elsewhere. Additionally, calls to
7179 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7181 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7182 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7183 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7184 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7188 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7189 enum expand_modifier modifier, rtx *alt_rtl)
7193 /* Handle ERROR_MARK before anybody tries to access its type. */
7194 if (TREE_CODE (exp) == ERROR_MARK
7195 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7197 ret = CONST0_RTX (tmode);
7198 return ret ? ret : const0_rtx;
7201 /* If this is an expression of some kind and it has an associated line
7202 number, then emit the line number before expanding the expression.
7204 We need to save and restore the file and line information so that
7205 errors discovered during expansion are emitted with the right
7206 information. It would be better of the diagnostic routines
7207 used the file/line information embedded in the tree nodes rather
7209 if (cfun && EXPR_HAS_LOCATION (exp))
7211 location_t saved_location = input_location;
7212 location_t saved_curr_loc = get_curr_insn_source_location ();
7213 tree saved_block = get_curr_insn_block ();
7214 input_location = EXPR_LOCATION (exp);
7215 set_curr_insn_source_location (input_location);
7217 /* Record where the insns produced belong. */
7218 set_curr_insn_block (TREE_BLOCK (exp));
7220 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7222 input_location = saved_location;
7223 set_curr_insn_block (saved_block);
7224 set_curr_insn_source_location (saved_curr_loc);
7228 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7235 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7236 enum expand_modifier modifier)
7238 rtx op0, op1, op2, temp;
7241 enum machine_mode mode;
7242 enum tree_code code = ops->code;
7244 rtx subtarget, original_target;
7246 bool reduce_bit_field;
7247 location_t loc = ops->location;
7248 tree treeop0, treeop1, treeop2;
7249 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7250 ? reduce_to_bit_field_precision ((expr), \
7256 mode = TYPE_MODE (type);
7257 unsignedp = TYPE_UNSIGNED (type);
7263 /* We should be called only on simple (binary or unary) expressions,
7264 exactly those that are valid in gimple expressions that aren't
7265 GIMPLE_SINGLE_RHS (or invalid). */
7266 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7267 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7268 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7270 ignore = (target == const0_rtx
7271 || ((CONVERT_EXPR_CODE_P (code)
7272 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7273 && TREE_CODE (type) == VOID_TYPE));
7275 /* We should be called only if we need the result. */
7276 gcc_assert (!ignore);
7278 /* An operation in what may be a bit-field type needs the
7279 result to be reduced to the precision of the bit-field type,
7280 which is narrower than that of the type's mode. */
7281 reduce_bit_field = (INTEGRAL_TYPE_P (type)
7282 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7284 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7287 /* Use subtarget as the target for operand 0 of a binary operation. */
7288 subtarget = get_subtarget (target);
7289 original_target = target;
7293 case NON_LVALUE_EXPR:
7296 if (treeop0 == error_mark_node)
7299 if (TREE_CODE (type) == UNION_TYPE)
7301 tree valtype = TREE_TYPE (treeop0);
7303 /* If both input and output are BLKmode, this conversion isn't doing
7304 anything except possibly changing memory attribute. */
7305 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7307 rtx result = expand_expr (treeop0, target, tmode,
7310 result = copy_rtx (result);
7311 set_mem_attributes (result, type, 0);
7317 if (TYPE_MODE (type) != BLKmode)
7318 target = gen_reg_rtx (TYPE_MODE (type));
7320 target = assign_temp (type, 0, 1, 1);
7324 /* Store data into beginning of memory target. */
7325 store_expr (treeop0,
7326 adjust_address (target, TYPE_MODE (valtype), 0),
7327 modifier == EXPAND_STACK_PARM,
7332 gcc_assert (REG_P (target));
7334 /* Store this field into a union of the proper type. */
7335 store_field (target,
7336 MIN ((int_size_in_bytes (TREE_TYPE
7339 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7340 0, TYPE_MODE (valtype), treeop0,
7344 /* Return the entire union. */
7348 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7350 op0 = expand_expr (treeop0, target, VOIDmode,
7353 /* If the signedness of the conversion differs and OP0 is
7354 a promoted SUBREG, clear that indication since we now
7355 have to do the proper extension. */
7356 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7357 && GET_CODE (op0) == SUBREG)
7358 SUBREG_PROMOTED_VAR_P (op0) = 0;
7360 return REDUCE_BIT_FIELD (op0);
7363 op0 = expand_expr (treeop0, NULL_RTX, mode,
7364 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7365 if (GET_MODE (op0) == mode)
7368 /* If OP0 is a constant, just convert it into the proper mode. */
7369 else if (CONSTANT_P (op0))
7371 tree inner_type = TREE_TYPE (treeop0);
7372 enum machine_mode inner_mode = GET_MODE (op0);
7374 if (inner_mode == VOIDmode)
7375 inner_mode = TYPE_MODE (inner_type);
7377 if (modifier == EXPAND_INITIALIZER)
7378 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7379 subreg_lowpart_offset (mode,
7382 op0= convert_modes (mode, inner_mode, op0,
7383 TYPE_UNSIGNED (inner_type));
7386 else if (modifier == EXPAND_INITIALIZER)
7387 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7389 else if (target == 0)
7390 op0 = convert_to_mode (mode, op0,
7391 TYPE_UNSIGNED (TREE_TYPE
7395 convert_move (target, op0,
7396 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7400 return REDUCE_BIT_FIELD (op0);
7402 case ADDR_SPACE_CONVERT_EXPR:
7404 tree treeop0_type = TREE_TYPE (treeop0);
7406 addr_space_t as_from;
7408 gcc_assert (POINTER_TYPE_P (type));
7409 gcc_assert (POINTER_TYPE_P (treeop0_type));
7411 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7412 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7414 /* Conversions between pointers to the same address space should
7415 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7416 gcc_assert (as_to != as_from);
7418 /* Ask target code to handle conversion between pointers
7419 to overlapping address spaces. */
7420 if (targetm.addr_space.subset_p (as_to, as_from)
7421 || targetm.addr_space.subset_p (as_from, as_to))
7423 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7424 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7429 /* For disjoint address spaces, converting anything but
7430 a null pointer invokes undefined behaviour. We simply
7431 always return a null pointer here. */
7432 return CONST0_RTX (mode);
7435 case POINTER_PLUS_EXPR:
7436 /* Even though the sizetype mode and the pointer's mode can be different
7437 expand is able to handle this correctly and get the correct result out
7438 of the PLUS_EXPR code. */
7439 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7440 if sizetype precision is smaller than pointer precision. */
7441 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7442 treeop1 = fold_convert_loc (loc, type,
7443 fold_convert_loc (loc, ssizetype,
7446 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7447 something else, make sure we add the register to the constant and
7448 then to the other thing. This case can occur during strength
7449 reduction and doing it this way will produce better code if the
7450 frame pointer or argument pointer is eliminated.
7452 fold-const.c will ensure that the constant is always in the inner
7453 PLUS_EXPR, so the only case we need to do anything about is if
7454 sp, ap, or fp is our second argument, in which case we must swap
7455 the innermost first argument and our second argument. */
7457 if (TREE_CODE (treeop0) == PLUS_EXPR
7458 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7459 && TREE_CODE (treeop1) == VAR_DECL
7460 && (DECL_RTL (treeop1) == frame_pointer_rtx
7461 || DECL_RTL (treeop1) == stack_pointer_rtx
7462 || DECL_RTL (treeop1) == arg_pointer_rtx))
7466 treeop1 = TREE_OPERAND (treeop0, 0);
7467 TREE_OPERAND (treeop0, 0) = t;
7470 /* If the result is to be ptr_mode and we are adding an integer to
7471 something, we might be forming a constant. So try to use
7472 plus_constant. If it produces a sum and we can't accept it,
7473 use force_operand. This allows P = &ARR[const] to generate
7474 efficient code on machines where a SYMBOL_REF is not a valid
7477 If this is an EXPAND_SUM call, always return the sum. */
7478 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7479 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7481 if (modifier == EXPAND_STACK_PARM)
7483 if (TREE_CODE (treeop0) == INTEGER_CST
7484 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7485 && TREE_CONSTANT (treeop1))
7489 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7491 /* Use immed_double_const to ensure that the constant is
7492 truncated according to the mode of OP1, then sign extended
7493 to a HOST_WIDE_INT. Using the constant directly can result
7494 in non-canonical RTL in a 64x32 cross compile. */
7496 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7498 TYPE_MODE (TREE_TYPE (treeop1)));
7499 op1 = plus_constant (op1, INTVAL (constant_part));
7500 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7501 op1 = force_operand (op1, target);
7502 return REDUCE_BIT_FIELD (op1);
7505 else if (TREE_CODE (treeop1) == INTEGER_CST
7506 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7507 && TREE_CONSTANT (treeop0))
7511 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7512 (modifier == EXPAND_INITIALIZER
7513 ? EXPAND_INITIALIZER : EXPAND_SUM));
7514 if (! CONSTANT_P (op0))
7516 op1 = expand_expr (treeop1, NULL_RTX,
7517 VOIDmode, modifier);
7518 /* Return a PLUS if modifier says it's OK. */
7519 if (modifier == EXPAND_SUM
7520 || modifier == EXPAND_INITIALIZER)
7521 return simplify_gen_binary (PLUS, mode, op0, op1);
7524 /* Use immed_double_const to ensure that the constant is
7525 truncated according to the mode of OP1, then sign extended
7526 to a HOST_WIDE_INT. Using the constant directly can result
7527 in non-canonical RTL in a 64x32 cross compile. */
7529 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7531 TYPE_MODE (TREE_TYPE (treeop0)));
7532 op0 = plus_constant (op0, INTVAL (constant_part));
7533 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7534 op0 = force_operand (op0, target);
7535 return REDUCE_BIT_FIELD (op0);
7539 /* Use TER to expand pointer addition of a negated value
7540 as pointer subtraction. */
7541 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
7542 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
7543 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
7544 && TREE_CODE (treeop1) == SSA_NAME
7545 && TYPE_MODE (TREE_TYPE (treeop0))
7546 == TYPE_MODE (TREE_TYPE (treeop1)))
7548 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
7551 treeop1 = gimple_assign_rhs1 (def);
7557 /* No sense saving up arithmetic to be done
7558 if it's all in the wrong mode to form part of an address.
7559 And force_operand won't know whether to sign-extend or
7561 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7562 || mode != ptr_mode)
7564 expand_operands (treeop0, treeop1,
7565 subtarget, &op0, &op1, EXPAND_NORMAL);
7566 if (op0 == const0_rtx)
7568 if (op1 == const0_rtx)
7573 expand_operands (treeop0, treeop1,
7574 subtarget, &op0, &op1, modifier);
7575 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7579 /* For initializers, we are allowed to return a MINUS of two
7580 symbolic constants. Here we handle all cases when both operands
7582 /* Handle difference of two symbolic constants,
7583 for the sake of an initializer. */
7584 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7585 && really_constant_p (treeop0)
7586 && really_constant_p (treeop1))
7588 expand_operands (treeop0, treeop1,
7589 NULL_RTX, &op0, &op1, modifier);
7591 /* If the last operand is a CONST_INT, use plus_constant of
7592 the negated constant. Else make the MINUS. */
7593 if (CONST_INT_P (op1))
7594 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7596 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7599 /* No sense saving up arithmetic to be done
7600 if it's all in the wrong mode to form part of an address.
7601 And force_operand won't know whether to sign-extend or
7603 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7604 || mode != ptr_mode)
7607 expand_operands (treeop0, treeop1,
7608 subtarget, &op0, &op1, modifier);
7610 /* Convert A - const to A + (-const). */
7611 if (CONST_INT_P (op1))
7613 op1 = negate_rtx (mode, op1);
7614 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7619 case WIDEN_MULT_PLUS_EXPR:
7620 case WIDEN_MULT_MINUS_EXPR:
7621 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7622 op2 = expand_normal (treeop2);
7623 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7627 case WIDEN_MULT_EXPR:
7628 /* If first operand is constant, swap them.
7629 Thus the following special case checks need only
7630 check the second operand. */
7631 if (TREE_CODE (treeop0) == INTEGER_CST)
7638 /* First, check if we have a multiplication of one signed and one
7639 unsigned operand. */
7640 if (TREE_CODE (treeop1) != INTEGER_CST
7641 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7642 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7644 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7645 this_optab = usmul_widen_optab;
7646 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7648 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7650 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7651 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7654 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
7660 /* Check for a multiplication with matching signedness. */
7661 else if ((TREE_CODE (treeop1) == INTEGER_CST
7662 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7663 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7664 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7666 tree op0type = TREE_TYPE (treeop0);
7667 enum machine_mode innermode = TYPE_MODE (op0type);
7668 bool zextend_p = TYPE_UNSIGNED (op0type);
7669 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7670 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7672 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7673 && TREE_CODE (treeop0) != INTEGER_CST)
7675 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7677 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7679 temp = expand_widening_mult (mode, op0, op1, target,
7680 unsignedp, this_optab);
7681 return REDUCE_BIT_FIELD (temp);
7683 if (optab_handler (other_optab, mode) != CODE_FOR_nothing
7684 && innermode == word_mode)
7687 op0 = expand_normal (treeop0);
7688 if (TREE_CODE (treeop1) == INTEGER_CST)
7689 op1 = convert_modes (innermode, mode,
7690 expand_normal (treeop1), unsignedp);
7692 op1 = expand_normal (treeop1);
7693 temp = expand_binop (mode, other_optab, op0, op1, target,
7694 unsignedp, OPTAB_LIB_WIDEN);
7695 hipart = gen_highpart (innermode, temp);
7696 htem = expand_mult_highpart_adjust (innermode, hipart,
7700 emit_move_insn (hipart, htem);
7701 return REDUCE_BIT_FIELD (temp);
7705 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7706 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7707 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7708 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7712 optab opt = fma_optab;
7715 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
7717 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
7719 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
7722 gcc_assert (fn != NULL_TREE);
7723 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
7724 return expand_builtin (call_expr, target, subtarget, mode, false);
7727 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
7728 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
7733 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
7736 op0 = expand_normal (gimple_assign_rhs1 (def0));
7737 op2 = expand_normal (gimple_assign_rhs1 (def2));
7740 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
7743 op0 = expand_normal (gimple_assign_rhs1 (def0));
7746 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
7749 op2 = expand_normal (gimple_assign_rhs1 (def2));
7753 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
7755 op2 = expand_normal (treeop2);
7756 op1 = expand_normal (treeop1);
7758 return expand_ternary_op (TYPE_MODE (type), opt,
7759 op0, op1, op2, target, 0);
7763 /* If this is a fixed-point operation, then we cannot use the code
7764 below because "expand_mult" doesn't support sat/no-sat fixed-point
7766 if (ALL_FIXED_POINT_MODE_P (mode))
7769 /* If first operand is constant, swap them.
7770 Thus the following special case checks need only
7771 check the second operand. */
7772 if (TREE_CODE (treeop0) == INTEGER_CST)
7779 /* Attempt to return something suitable for generating an
7780 indexed address, for machines that support that. */
7782 if (modifier == EXPAND_SUM && mode == ptr_mode
7783 && host_integerp (treeop1, 0))
7785 tree exp1 = treeop1;
7787 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7791 op0 = force_operand (op0, NULL_RTX);
7793 op0 = copy_to_mode_reg (mode, op0);
7795 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7796 gen_int_mode (tree_low_cst (exp1, 0),
7797 TYPE_MODE (TREE_TYPE (exp1)))));
7800 if (modifier == EXPAND_STACK_PARM)
7803 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7804 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7806 case TRUNC_DIV_EXPR:
7807 case FLOOR_DIV_EXPR:
7809 case ROUND_DIV_EXPR:
7810 case EXACT_DIV_EXPR:
7811 /* If this is a fixed-point operation, then we cannot use the code
7812 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7814 if (ALL_FIXED_POINT_MODE_P (mode))
7817 if (modifier == EXPAND_STACK_PARM)
7819 /* Possible optimization: compute the dividend with EXPAND_SUM
7820 then if the divisor is constant can optimize the case
7821 where some terms of the dividend have coeffs divisible by it. */
7822 expand_operands (treeop0, treeop1,
7823 subtarget, &op0, &op1, EXPAND_NORMAL);
7824 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7829 case TRUNC_MOD_EXPR:
7830 case FLOOR_MOD_EXPR:
7832 case ROUND_MOD_EXPR:
7833 if (modifier == EXPAND_STACK_PARM)
7835 expand_operands (treeop0, treeop1,
7836 subtarget, &op0, &op1, EXPAND_NORMAL);
7837 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7839 case FIXED_CONVERT_EXPR:
7840 op0 = expand_normal (treeop0);
7841 if (target == 0 || modifier == EXPAND_STACK_PARM)
7842 target = gen_reg_rtx (mode);
7844 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7845 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7846 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7847 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7849 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7852 case FIX_TRUNC_EXPR:
7853 op0 = expand_normal (treeop0);
7854 if (target == 0 || modifier == EXPAND_STACK_PARM)
7855 target = gen_reg_rtx (mode);
7856 expand_fix (target, op0, unsignedp);
7860 op0 = expand_normal (treeop0);
7861 if (target == 0 || modifier == EXPAND_STACK_PARM)
7862 target = gen_reg_rtx (mode);
7863 /* expand_float can't figure out what to do if FROM has VOIDmode.
7864 So give it the correct mode. With -O, cse will optimize this. */
7865 if (GET_MODE (op0) == VOIDmode)
7866 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7868 expand_float (target, op0,
7869 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7873 op0 = expand_expr (treeop0, subtarget,
7874 VOIDmode, EXPAND_NORMAL);
7875 if (modifier == EXPAND_STACK_PARM)
7877 temp = expand_unop (mode,
7878 optab_for_tree_code (NEGATE_EXPR, type,
7882 return REDUCE_BIT_FIELD (temp);
7885 op0 = expand_expr (treeop0, subtarget,
7886 VOIDmode, EXPAND_NORMAL);
7887 if (modifier == EXPAND_STACK_PARM)
7890 /* ABS_EXPR is not valid for complex arguments. */
7891 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7892 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7894 /* Unsigned abs is simply the operand. Testing here means we don't
7895 risk generating incorrect code below. */
7896 if (TYPE_UNSIGNED (type))
7899 return expand_abs (mode, op0, target, unsignedp,
7900 safe_from_p (target, treeop0, 1));
7904 target = original_target;
7906 || modifier == EXPAND_STACK_PARM
7907 || (MEM_P (target) && MEM_VOLATILE_P (target))
7908 || GET_MODE (target) != mode
7910 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7911 target = gen_reg_rtx (mode);
7912 expand_operands (treeop0, treeop1,
7913 target, &op0, &op1, EXPAND_NORMAL);
7915 /* First try to do it with a special MIN or MAX instruction.
7916 If that does not win, use a conditional jump to select the proper
7918 this_optab = optab_for_tree_code (code, type, optab_default);
7919 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7924 /* At this point, a MEM target is no longer useful; we will get better
7927 if (! REG_P (target))
7928 target = gen_reg_rtx (mode);
7930 /* If op1 was placed in target, swap op0 and op1. */
7931 if (target != op0 && target == op1)
7938 /* We generate better code and avoid problems with op1 mentioning
7939 target by forcing op1 into a pseudo if it isn't a constant. */
7940 if (! CONSTANT_P (op1))
7941 op1 = force_reg (mode, op1);
7944 enum rtx_code comparison_code;
7947 if (code == MAX_EXPR)
7948 comparison_code = unsignedp ? GEU : GE;
7950 comparison_code = unsignedp ? LEU : LE;
7952 /* Canonicalize to comparisons against 0. */
7953 if (op1 == const1_rtx)
7955 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7956 or (a != 0 ? a : 1) for unsigned.
7957 For MIN we are safe converting (a <= 1 ? a : 1)
7958 into (a <= 0 ? a : 1) */
7959 cmpop1 = const0_rtx;
7960 if (code == MAX_EXPR)
7961 comparison_code = unsignedp ? NE : GT;
7963 if (op1 == constm1_rtx && !unsignedp)
7965 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7966 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7967 cmpop1 = const0_rtx;
7968 if (code == MIN_EXPR)
7969 comparison_code = LT;
7971 #ifdef HAVE_conditional_move
7972 /* Use a conditional move if possible. */
7973 if (can_conditionally_move_p (mode))
7977 /* ??? Same problem as in expmed.c: emit_conditional_move
7978 forces a stack adjustment via compare_from_rtx, and we
7979 lose the stack adjustment if the sequence we are about
7980 to create is discarded. */
7981 do_pending_stack_adjust ();
7985 /* Try to emit the conditional move. */
7986 insn = emit_conditional_move (target, comparison_code,
7991 /* If we could do the conditional move, emit the sequence,
7995 rtx seq = get_insns ();
8001 /* Otherwise discard the sequence and fall back to code with
8007 emit_move_insn (target, op0);
8009 temp = gen_label_rtx ();
8010 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8011 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8014 emit_move_insn (target, op1);
8019 op0 = expand_expr (treeop0, subtarget,
8020 VOIDmode, EXPAND_NORMAL);
8021 if (modifier == EXPAND_STACK_PARM)
8023 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8027 /* ??? Can optimize bitwise operations with one arg constant.
8028 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8029 and (a bitwise1 b) bitwise2 b (etc)
8030 but that is probably not worth while. */
8032 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8033 boolean values when we want in all cases to compute both of them. In
8034 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8035 as actual zero-or-1 values and then bitwise anding. In cases where
8036 there cannot be any side effects, better code would be made by
8037 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8038 how to recognize those cases. */
8040 case TRUTH_AND_EXPR:
8041 code = BIT_AND_EXPR;
8046 code = BIT_IOR_EXPR;
8050 case TRUTH_XOR_EXPR:
8051 code = BIT_XOR_EXPR;
8057 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8058 || (GET_MODE_PRECISION (TYPE_MODE (type))
8059 == TYPE_PRECISION (type)));
8064 /* If this is a fixed-point operation, then we cannot use the code
8065 below because "expand_shift" doesn't support sat/no-sat fixed-point
8067 if (ALL_FIXED_POINT_MODE_P (mode))
8070 if (! safe_from_p (subtarget, treeop1, 1))
8072 if (modifier == EXPAND_STACK_PARM)
8074 op0 = expand_expr (treeop0, subtarget,
8075 VOIDmode, EXPAND_NORMAL);
8076 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8078 if (code == LSHIFT_EXPR)
8079 temp = REDUCE_BIT_FIELD (temp);
8082 /* Could determine the answer when only additive constants differ. Also,
8083 the addition of one can be handled by changing the condition. */
8090 case UNORDERED_EXPR:
8098 temp = do_store_flag (ops,
8099 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8100 tmode != VOIDmode ? tmode : mode);
8104 /* Use a compare and a jump for BLKmode comparisons, or for function
8105 type comparisons is HAVE_canonicalize_funcptr_for_compare. */
8108 || modifier == EXPAND_STACK_PARM
8109 || ! safe_from_p (target, treeop0, 1)
8110 || ! safe_from_p (target, treeop1, 1)
8111 /* Make sure we don't have a hard reg (such as function's return
8112 value) live across basic blocks, if not optimizing. */
8113 || (!optimize && REG_P (target)
8114 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8115 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8117 emit_move_insn (target, const0_rtx);
8119 op1 = gen_label_rtx ();
8120 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8122 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8123 emit_move_insn (target, constm1_rtx);
8125 emit_move_insn (target, const1_rtx);
8130 case TRUTH_NOT_EXPR:
8131 if (modifier == EXPAND_STACK_PARM)
8133 op0 = expand_expr (treeop0, target,
8134 VOIDmode, EXPAND_NORMAL);
8135 /* The parser is careful to generate TRUTH_NOT_EXPR
8136 only with operands that are always zero or one. */
8137 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8138 target, 1, OPTAB_LIB_WIDEN);
8143 /* Get the rtx code of the operands. */
8144 op0 = expand_normal (treeop0);
8145 op1 = expand_normal (treeop1);
8148 target = gen_reg_rtx (TYPE_MODE (type));
8150 /* Move the real (op0) and imaginary (op1) parts to their location. */
8151 write_complex_part (target, op0, false);
8152 write_complex_part (target, op1, true);
8156 case WIDEN_SUM_EXPR:
8158 tree oprnd0 = treeop0;
8159 tree oprnd1 = treeop1;
8161 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8162 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8167 case REDUC_MAX_EXPR:
8168 case REDUC_MIN_EXPR:
8169 case REDUC_PLUS_EXPR:
8171 op0 = expand_normal (treeop0);
8172 this_optab = optab_for_tree_code (code, type, optab_default);
8173 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8178 case VEC_EXTRACT_EVEN_EXPR:
8179 case VEC_EXTRACT_ODD_EXPR:
8181 expand_operands (treeop0, treeop1,
8182 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8183 this_optab = optab_for_tree_code (code, type, optab_default);
8184 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8190 case VEC_INTERLEAVE_HIGH_EXPR:
8191 case VEC_INTERLEAVE_LOW_EXPR:
8193 expand_operands (treeop0, treeop1,
8194 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8195 this_optab = optab_for_tree_code (code, type, optab_default);
8196 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8202 case VEC_LSHIFT_EXPR:
8203 case VEC_RSHIFT_EXPR:
8205 target = expand_vec_shift_expr (ops, target);
8209 case VEC_UNPACK_HI_EXPR:
8210 case VEC_UNPACK_LO_EXPR:
8212 op0 = expand_normal (treeop0);
8213 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8219 case VEC_UNPACK_FLOAT_HI_EXPR:
8220 case VEC_UNPACK_FLOAT_LO_EXPR:
8222 op0 = expand_normal (treeop0);
8223 /* The signedness is determined from input operand. */
8224 temp = expand_widen_pattern_expr
8225 (ops, op0, NULL_RTX, NULL_RTX,
8226 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8232 case VEC_WIDEN_MULT_HI_EXPR:
8233 case VEC_WIDEN_MULT_LO_EXPR:
8235 tree oprnd0 = treeop0;
8236 tree oprnd1 = treeop1;
8238 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8239 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8241 gcc_assert (target);
8245 case VEC_PACK_TRUNC_EXPR:
8246 case VEC_PACK_SAT_EXPR:
8247 case VEC_PACK_FIX_TRUNC_EXPR:
8248 mode = TYPE_MODE (TREE_TYPE (treeop0));
8253 tree oprnd0 = treeop0;
8254 tree oprnd1 = treeop1;
8255 tree oprnd2 = treeop2;
8258 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8259 op2 = expand_normal (oprnd2);
8260 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8265 case REALIGN_LOAD_EXPR:
8267 tree oprnd0 = treeop0;
8268 tree oprnd1 = treeop1;
8269 tree oprnd2 = treeop2;
8272 this_optab = optab_for_tree_code (code, type, optab_default);
8273 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8274 op2 = expand_normal (oprnd2);
8275 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8285 /* Here to do an ordinary binary operator. */
8287 expand_operands (treeop0, treeop1,
8288 subtarget, &op0, &op1, EXPAND_NORMAL);
8290 this_optab = optab_for_tree_code (code, type, optab_default);
8292 if (modifier == EXPAND_STACK_PARM)
8294 temp = expand_binop (mode, this_optab, op0, op1, target,
8295 unsignedp, OPTAB_LIB_WIDEN);
8297 return REDUCE_BIT_FIELD (temp);
8299 #undef REDUCE_BIT_FIELD
8302 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8303 enum expand_modifier modifier, rtx *alt_rtl)
8305 rtx op0, op1, temp, decl_rtl;
8308 enum machine_mode mode;
8309 enum tree_code code = TREE_CODE (exp);
8310 rtx subtarget, original_target;
8313 bool reduce_bit_field;
8314 location_t loc = EXPR_LOCATION (exp);
8315 struct separate_ops ops;
8316 tree treeop0, treeop1, treeop2;
8317 tree ssa_name = NULL_TREE;
8320 type = TREE_TYPE (exp);
8321 mode = TYPE_MODE (type);
8322 unsignedp = TYPE_UNSIGNED (type);
8324 treeop0 = treeop1 = treeop2 = NULL_TREE;
8325 if (!VL_EXP_CLASS_P (exp))
8326 switch (TREE_CODE_LENGTH (code))
8329 case 3: treeop2 = TREE_OPERAND (exp, 2);
8330 case 2: treeop1 = TREE_OPERAND (exp, 1);
8331 case 1: treeop0 = TREE_OPERAND (exp, 0);
8341 ignore = (target == const0_rtx
8342 || ((CONVERT_EXPR_CODE_P (code)
8343 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8344 && TREE_CODE (type) == VOID_TYPE));
8346 /* An operation in what may be a bit-field type needs the
8347 result to be reduced to the precision of the bit-field type,
8348 which is narrower than that of the type's mode. */
8349 reduce_bit_field = (!ignore
8350 && INTEGRAL_TYPE_P (type)
8351 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8353 /* If we are going to ignore this result, we need only do something
8354 if there is a side-effect somewhere in the expression. If there
8355 is, short-circuit the most common cases here. Note that we must
8356 not call expand_expr with anything but const0_rtx in case this
8357 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8361 if (! TREE_SIDE_EFFECTS (exp))
8364 /* Ensure we reference a volatile object even if value is ignored, but
8365 don't do this if all we are doing is taking its address. */
8366 if (TREE_THIS_VOLATILE (exp)
8367 && TREE_CODE (exp) != FUNCTION_DECL
8368 && mode != VOIDmode && mode != BLKmode
8369 && modifier != EXPAND_CONST_ADDRESS)
8371 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8377 if (TREE_CODE_CLASS (code) == tcc_unary
8378 || code == COMPONENT_REF || code == INDIRECT_REF)
8379 return expand_expr (treeop0, const0_rtx, VOIDmode,
8382 else if (TREE_CODE_CLASS (code) == tcc_binary
8383 || TREE_CODE_CLASS (code) == tcc_comparison
8384 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8386 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8387 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8390 else if (code == BIT_FIELD_REF)
8392 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8393 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8394 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8401 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8404 /* Use subtarget as the target for operand 0 of a binary operation. */
8405 subtarget = get_subtarget (target);
8406 original_target = target;
8412 tree function = decl_function_context (exp);
8414 temp = label_rtx (exp);
8415 temp = gen_rtx_LABEL_REF (Pmode, temp);
8417 if (function != current_function_decl
8419 LABEL_REF_NONLOCAL_P (temp) = 1;
8421 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8426 /* ??? ivopts calls expander, without any preparation from
8427 out-of-ssa. So fake instructions as if this was an access to the
8428 base variable. This unnecessarily allocates a pseudo, see how we can
8429 reuse it, if partition base vars have it set already. */
8430 if (!currently_expanding_to_rtl)
8431 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8434 g = get_gimple_for_ssa_name (exp);
8435 /* For EXPAND_INITIALIZER try harder to get something simpler. */
8437 && modifier == EXPAND_INITIALIZER
8438 && !SSA_NAME_IS_DEFAULT_DEF (exp)
8439 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
8440 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
8441 g = SSA_NAME_DEF_STMT (exp);
8443 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8447 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8448 exp = SSA_NAME_VAR (ssa_name);
8449 goto expand_decl_rtl;
8453 /* If a static var's type was incomplete when the decl was written,
8454 but the type is complete now, lay out the decl now. */
8455 if (DECL_SIZE (exp) == 0
8456 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8457 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8458 layout_decl (exp, 0);
8460 /* ... fall through ... */
8464 decl_rtl = DECL_RTL (exp);
8466 gcc_assert (decl_rtl);
8467 decl_rtl = copy_rtx (decl_rtl);
8468 /* Record writes to register variables. */
8469 if (modifier == EXPAND_WRITE
8471 && HARD_REGISTER_P (decl_rtl))
8472 add_to_hard_reg_set (&crtl->asm_clobbers,
8473 GET_MODE (decl_rtl), REGNO (decl_rtl));
8475 /* Ensure variable marked as used even if it doesn't go through
8476 a parser. If it hasn't be used yet, write out an external
8478 if (! TREE_USED (exp))
8480 assemble_external (exp);
8481 TREE_USED (exp) = 1;
8484 /* Show we haven't gotten RTL for this yet. */
8487 /* Variables inherited from containing functions should have
8488 been lowered by this point. */
8489 context = decl_function_context (exp);
8490 gcc_assert (!context
8491 || context == current_function_decl
8492 || TREE_STATIC (exp)
8493 || DECL_EXTERNAL (exp)
8494 /* ??? C++ creates functions that are not TREE_STATIC. */
8495 || TREE_CODE (exp) == FUNCTION_DECL);
8497 /* This is the case of an array whose size is to be determined
8498 from its initializer, while the initializer is still being parsed.
8501 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8502 temp = validize_mem (decl_rtl);
8504 /* If DECL_RTL is memory, we are in the normal case and the
8505 address is not valid, get the address into a register. */
8507 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8510 *alt_rtl = decl_rtl;
8511 decl_rtl = use_anchored_address (decl_rtl);
8512 if (modifier != EXPAND_CONST_ADDRESS
8513 && modifier != EXPAND_SUM
8514 && !memory_address_addr_space_p (DECL_MODE (exp),
8516 MEM_ADDR_SPACE (decl_rtl)))
8517 temp = replace_equiv_address (decl_rtl,
8518 copy_rtx (XEXP (decl_rtl, 0)));
8521 /* If we got something, return it. But first, set the alignment
8522 if the address is a register. */
8525 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8526 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8531 /* If the mode of DECL_RTL does not match that of the decl, it
8532 must be a promoted value. We return a SUBREG of the wanted mode,
8533 but mark it so that we know that it was already extended. */
8534 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8536 enum machine_mode pmode;
8538 /* Get the signedness to be used for this variable. Ensure we get
8539 the same mode we got when the variable was declared. */
8540 if (code == SSA_NAME
8541 && (g = SSA_NAME_DEF_STMT (ssa_name))
8542 && gimple_code (g) == GIMPLE_CALL)
8544 gcc_assert (!gimple_call_internal_p (g));
8545 pmode = promote_function_mode (type, mode, &unsignedp,
8546 gimple_call_fntype (g),
8550 pmode = promote_decl_mode (exp, &unsignedp);
8551 gcc_assert (GET_MODE (decl_rtl) == pmode);
8553 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8554 SUBREG_PROMOTED_VAR_P (temp) = 1;
8555 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8562 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8563 TREE_INT_CST_HIGH (exp), mode);
8569 tree tmp = NULL_TREE;
8570 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8571 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8572 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8573 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8574 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8575 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8576 return const_vector_from_tree (exp);
8577 if (GET_MODE_CLASS (mode) == MODE_INT)
8579 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8581 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8584 tmp = build_constructor_from_list (type,
8585 TREE_VECTOR_CST_ELTS (exp));
8586 return expand_expr (tmp, ignore ? const0_rtx : target,
8591 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8594 /* If optimized, generate immediate CONST_DOUBLE
8595 which will be turned into memory by reload if necessary.
8597 We used to force a register so that loop.c could see it. But
8598 this does not allow gen_* patterns to perform optimizations with
8599 the constants. It also produces two insns in cases like "x = 1.0;".
8600 On most machines, floating-point constants are not permitted in
8601 many insns, so we'd end up copying it to a register in any case.
8603 Now, we do the copying in expand_binop, if appropriate. */
8604 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8605 TYPE_MODE (TREE_TYPE (exp)));
8608 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8609 TYPE_MODE (TREE_TYPE (exp)));
8612 /* Handle evaluating a complex constant in a CONCAT target. */
8613 if (original_target && GET_CODE (original_target) == CONCAT)
8615 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8618 rtarg = XEXP (original_target, 0);
8619 itarg = XEXP (original_target, 1);
8621 /* Move the real and imaginary parts separately. */
8622 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8623 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8626 emit_move_insn (rtarg, op0);
8628 emit_move_insn (itarg, op1);
8630 return original_target;
8633 /* ... fall through ... */
8636 temp = expand_expr_constant (exp, 1, modifier);
8638 /* temp contains a constant address.
8639 On RISC machines where a constant address isn't valid,
8640 make some insns to get that address into a register. */
8641 if (modifier != EXPAND_CONST_ADDRESS
8642 && modifier != EXPAND_INITIALIZER
8643 && modifier != EXPAND_SUM
8644 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8645 MEM_ADDR_SPACE (temp)))
8646 return replace_equiv_address (temp,
8647 copy_rtx (XEXP (temp, 0)));
8653 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8655 if (!SAVE_EXPR_RESOLVED_P (exp))
8657 /* We can indeed still hit this case, typically via builtin
8658 expanders calling save_expr immediately before expanding
8659 something. Assume this means that we only have to deal
8660 with non-BLKmode values. */
8661 gcc_assert (GET_MODE (ret) != BLKmode);
8663 val = build_decl (EXPR_LOCATION (exp),
8664 VAR_DECL, NULL, TREE_TYPE (exp));
8665 DECL_ARTIFICIAL (val) = 1;
8666 DECL_IGNORED_P (val) = 1;
8668 TREE_OPERAND (exp, 0) = treeop0;
8669 SAVE_EXPR_RESOLVED_P (exp) = 1;
8671 if (!CONSTANT_P (ret))
8672 ret = copy_to_reg (ret);
8673 SET_DECL_RTL (val, ret);
8681 /* If we don't need the result, just ensure we evaluate any
8685 unsigned HOST_WIDE_INT idx;
8688 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8689 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8694 return expand_constructor (exp, target, modifier, false);
8696 case TARGET_MEM_REF:
8698 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8699 struct mem_address addr;
8702 get_address_description (exp, &addr);
8703 op0 = addr_for_mem_ref (&addr, as, true);
8704 op0 = memory_address_addr_space (mode, op0, as);
8705 temp = gen_rtx_MEM (mode, op0);
8706 set_mem_attributes (temp, exp, 0);
8707 set_mem_addr_space (temp, as);
8708 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8709 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8711 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8712 /* If the target does not have special handling for unaligned
8713 loads of mode then it can use regular moves for them. */
8714 && ((icode = optab_handler (movmisalign_optab, mode))
8715 != CODE_FOR_nothing))
8719 /* We've already validated the memory, and we're creating a
8720 new pseudo destination. The predicates really can't fail. */
8721 reg = gen_reg_rtx (mode);
8723 /* Nor can the insn generator. */
8724 insn = GEN_FCN (icode) (reg, temp);
8725 gcc_assert (insn != NULL_RTX);
8736 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8737 enum machine_mode address_mode;
8738 tree base = TREE_OPERAND (exp, 0);
8741 /* Handle expansion of non-aliased memory with non-BLKmode. That
8742 might end up in a register. */
8743 if (TREE_CODE (base) == ADDR_EXPR)
8745 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8747 base = TREE_OPERAND (base, 0);
8751 base = get_addr_base_and_unit_offset (base, &off);
8755 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8756 decl we must use bitfield operations. */
8758 && !TREE_ADDRESSABLE (base)
8759 && DECL_MODE (base) != BLKmode
8760 && DECL_RTL_SET_P (base)
8761 && !MEM_P (DECL_RTL (base)))
8765 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8766 && (GET_MODE_BITSIZE (DECL_MODE (base))
8767 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8768 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8769 TREE_TYPE (exp), base),
8770 target, tmode, modifier);
8771 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8772 bftype = TREE_TYPE (base);
8773 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8774 bftype = TREE_TYPE (exp);
8775 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8777 TYPE_SIZE (TREE_TYPE (exp)),
8779 target, tmode, modifier);
8782 address_mode = targetm.addr_space.address_mode (as);
8783 base = TREE_OPERAND (exp, 0);
8784 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
8786 tree mask = gimple_assign_rhs2 (def_stmt);
8787 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
8788 gimple_assign_rhs1 (def_stmt), mask);
8789 TREE_OPERAND (exp, 0) = base;
8791 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8792 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8793 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
8794 op0 = memory_address_addr_space (address_mode, op0, as);
8795 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8798 = immed_double_int_const (mem_ref_offset (exp), address_mode);
8799 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
8801 op0 = memory_address_addr_space (mode, op0, as);
8802 temp = gen_rtx_MEM (mode, op0);
8803 set_mem_attributes (temp, exp, 0);
8804 set_mem_addr_space (temp, as);
8805 if (TREE_THIS_VOLATILE (exp))
8806 MEM_VOLATILE_P (temp) = 1;
8808 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8809 /* If the target does not have special handling for unaligned
8810 loads of mode then it can use regular moves for them. */
8811 && ((icode = optab_handler (movmisalign_optab, mode))
8812 != CODE_FOR_nothing))
8816 /* We've already validated the memory, and we're creating a
8817 new pseudo destination. The predicates really can't fail. */
8818 reg = gen_reg_rtx (mode);
8820 /* Nor can the insn generator. */
8821 insn = GEN_FCN (icode) (reg, temp);
8832 tree array = treeop0;
8833 tree index = treeop1;
8835 /* Fold an expression like: "foo"[2].
8836 This is not done in fold so it won't happen inside &.
8837 Don't fold if this is for wide characters since it's too
8838 difficult to do correctly and this is a very rare case. */
8840 if (modifier != EXPAND_CONST_ADDRESS
8841 && modifier != EXPAND_INITIALIZER
8842 && modifier != EXPAND_MEMORY)
8844 tree t = fold_read_from_constant_string (exp);
8847 return expand_expr (t, target, tmode, modifier);
8850 /* If this is a constant index into a constant array,
8851 just get the value from the array. Handle both the cases when
8852 we have an explicit constructor and when our operand is a variable
8853 that was declared const. */
8855 if (modifier != EXPAND_CONST_ADDRESS
8856 && modifier != EXPAND_INITIALIZER
8857 && modifier != EXPAND_MEMORY
8858 && TREE_CODE (array) == CONSTRUCTOR
8859 && ! TREE_SIDE_EFFECTS (array)
8860 && TREE_CODE (index) == INTEGER_CST)
8862 unsigned HOST_WIDE_INT ix;
8865 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8867 if (tree_int_cst_equal (field, index))
8869 if (!TREE_SIDE_EFFECTS (value))
8870 return expand_expr (fold (value), target, tmode, modifier);
8875 else if (optimize >= 1
8876 && modifier != EXPAND_CONST_ADDRESS
8877 && modifier != EXPAND_INITIALIZER
8878 && modifier != EXPAND_MEMORY
8879 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8880 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8881 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8882 && const_value_known_p (array))
8884 if (TREE_CODE (index) == INTEGER_CST)
8886 tree init = DECL_INITIAL (array);
8888 if (TREE_CODE (init) == CONSTRUCTOR)
8890 unsigned HOST_WIDE_INT ix;
8893 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8895 if (tree_int_cst_equal (field, index))
8897 if (TREE_SIDE_EFFECTS (value))
8900 if (TREE_CODE (value) == CONSTRUCTOR)
8902 /* If VALUE is a CONSTRUCTOR, this
8903 optimization is only useful if
8904 this doesn't store the CONSTRUCTOR
8905 into memory. If it does, it is more
8906 efficient to just load the data from
8907 the array directly. */
8908 rtx ret = expand_constructor (value, target,
8910 if (ret == NULL_RTX)
8914 return expand_expr (fold (value), target, tmode,
8918 else if(TREE_CODE (init) == STRING_CST)
8920 tree index1 = index;
8921 tree low_bound = array_ref_low_bound (exp);
8922 index1 = fold_convert_loc (loc, sizetype,
8925 /* Optimize the special-case of a zero lower bound.
8927 We convert the low_bound to sizetype to avoid some problems
8928 with constant folding. (E.g. suppose the lower bound is 1,
8929 and its mode is QI. Without the conversion,l (ARRAY
8930 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8931 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
8933 if (! integer_zerop (low_bound))
8934 index1 = size_diffop_loc (loc, index1,
8935 fold_convert_loc (loc, sizetype,
8938 if (0 > compare_tree_int (index1,
8939 TREE_STRING_LENGTH (init)))
8941 tree type = TREE_TYPE (TREE_TYPE (init));
8942 enum machine_mode mode = TYPE_MODE (type);
8944 if (GET_MODE_CLASS (mode) == MODE_INT
8945 && GET_MODE_SIZE (mode) == 1)
8946 return gen_int_mode (TREE_STRING_POINTER (init)
8947 [TREE_INT_CST_LOW (index1)],
8954 goto normal_inner_ref;
8957 /* If the operand is a CONSTRUCTOR, we can just extract the
8958 appropriate field if it is present. */
8959 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8961 unsigned HOST_WIDE_INT idx;
8964 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8966 if (field == treeop1
8967 /* We can normally use the value of the field in the
8968 CONSTRUCTOR. However, if this is a bitfield in
8969 an integral mode that we can fit in a HOST_WIDE_INT,
8970 we must mask only the number of bits in the bitfield,
8971 since this is done implicitly by the constructor. If
8972 the bitfield does not meet either of those conditions,
8973 we can't do this optimization. */
8974 && (! DECL_BIT_FIELD (field)
8975 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8976 && (GET_MODE_BITSIZE (DECL_MODE (field))
8977 <= HOST_BITS_PER_WIDE_INT))))
8979 if (DECL_BIT_FIELD (field)
8980 && modifier == EXPAND_STACK_PARM)
8982 op0 = expand_expr (value, target, tmode, modifier);
8983 if (DECL_BIT_FIELD (field))
8985 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8986 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8988 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8990 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8991 op0 = expand_and (imode, op0, op1, target);
8995 int count = GET_MODE_BITSIZE (imode) - bitsize;
8997 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8999 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9007 goto normal_inner_ref;
9010 case ARRAY_RANGE_REF:
9013 enum machine_mode mode1, mode2;
9014 HOST_WIDE_INT bitsize, bitpos;
9016 int volatilep = 0, must_force_mem;
9017 bool packedp = false;
9018 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9019 &mode1, &unsignedp, &volatilep, true);
9020 rtx orig_op0, memloc;
9022 /* If we got back the original object, something is wrong. Perhaps
9023 we are evaluating an expression too early. In any event, don't
9024 infinitely recurse. */
9025 gcc_assert (tem != exp);
9027 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9028 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9029 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9032 /* If TEM's type is a union of variable size, pass TARGET to the inner
9033 computation, since it will need a temporary and TARGET is known
9034 to have to do. This occurs in unchecked conversion in Ada. */
9037 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9038 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9040 && modifier != EXPAND_STACK_PARM
9041 ? target : NULL_RTX),
9043 (modifier == EXPAND_INITIALIZER
9044 || modifier == EXPAND_CONST_ADDRESS
9045 || modifier == EXPAND_STACK_PARM)
9046 ? modifier : EXPAND_NORMAL);
9049 /* If the bitfield is volatile, we want to access it in the
9050 field's mode, not the computed mode.
9051 If a MEM has VOIDmode (external with incomplete type),
9052 use BLKmode for it instead. */
9055 if (volatilep && flag_strict_volatile_bitfields > 0)
9056 op0 = adjust_address (op0, mode1, 0);
9057 else if (GET_MODE (op0) == VOIDmode)
9058 op0 = adjust_address (op0, BLKmode, 0);
9062 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9064 /* If we have either an offset, a BLKmode result, or a reference
9065 outside the underlying object, we must force it to memory.
9066 Such a case can occur in Ada if we have unchecked conversion
9067 of an expression from a scalar type to an aggregate type or
9068 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9069 passed a partially uninitialized object or a view-conversion
9070 to a larger size. */
9071 must_force_mem = (offset
9073 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9075 /* Handle CONCAT first. */
9076 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9079 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9082 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9085 op0 = XEXP (op0, 0);
9086 mode2 = GET_MODE (op0);
9088 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9089 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9093 op0 = XEXP (op0, 1);
9095 mode2 = GET_MODE (op0);
9098 /* Otherwise force into memory. */
9102 /* If this is a constant, put it in a register if it is a legitimate
9103 constant and we don't need a memory reference. */
9104 if (CONSTANT_P (op0)
9106 && targetm.legitimate_constant_p (mode2, op0)
9108 op0 = force_reg (mode2, op0);
9110 /* Otherwise, if this is a constant, try to force it to the constant
9111 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9112 is a legitimate constant. */
9113 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9114 op0 = validize_mem (memloc);
9116 /* Otherwise, if this is a constant or the object is not in memory
9117 and need be, put it there. */
9118 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9120 tree nt = build_qualified_type (TREE_TYPE (tem),
9121 (TYPE_QUALS (TREE_TYPE (tem))
9122 | TYPE_QUAL_CONST));
9123 memloc = assign_temp (nt, 1, 1, 1);
9124 emit_move_insn (memloc, op0);
9130 enum machine_mode address_mode;
9131 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9134 gcc_assert (MEM_P (op0));
9137 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9138 if (GET_MODE (offset_rtx) != address_mode)
9139 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9141 if (GET_MODE (op0) == BLKmode
9142 /* A constant address in OP0 can have VOIDmode, we must
9143 not try to call force_reg in that case. */
9144 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9146 && (bitpos % bitsize) == 0
9147 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9148 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9150 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9154 op0 = offset_address (op0, offset_rtx,
9155 highest_pow2_factor (offset));
9158 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9159 record its alignment as BIGGEST_ALIGNMENT. */
9160 if (MEM_P (op0) && bitpos == 0 && offset != 0
9161 && is_aligning_offset (offset, tem))
9162 set_mem_align (op0, BIGGEST_ALIGNMENT);
9164 /* Don't forget about volatility even if this is a bitfield. */
9165 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9167 if (op0 == orig_op0)
9168 op0 = copy_rtx (op0);
9170 MEM_VOLATILE_P (op0) = 1;
9173 /* In cases where an aligned union has an unaligned object
9174 as a field, we might be extracting a BLKmode value from
9175 an integer-mode (e.g., SImode) object. Handle this case
9176 by doing the extract into an object as wide as the field
9177 (which we know to be the width of a basic mode), then
9178 storing into memory, and changing the mode to BLKmode. */
9179 if (mode1 == VOIDmode
9180 || REG_P (op0) || GET_CODE (op0) == SUBREG
9181 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9182 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9183 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9184 && modifier != EXPAND_CONST_ADDRESS
9185 && modifier != EXPAND_INITIALIZER)
9186 /* If the field is volatile, we always want an aligned
9187 access. Only do this if the access is not already naturally
9188 aligned, otherwise "normal" (non-bitfield) volatile fields
9189 become non-addressable. */
9190 || (volatilep && flag_strict_volatile_bitfields > 0
9191 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
9192 /* If the field isn't aligned enough to fetch as a memref,
9193 fetch it as a bit field. */
9194 || (mode1 != BLKmode
9195 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9196 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9198 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9199 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9200 && ((modifier == EXPAND_CONST_ADDRESS
9201 || modifier == EXPAND_INITIALIZER)
9203 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9204 || (bitpos % BITS_PER_UNIT != 0)))
9205 /* If the type and the field are a constant size and the
9206 size of the type isn't the same size as the bitfield,
9207 we must use bitfield operations. */
9209 && TYPE_SIZE (TREE_TYPE (exp))
9210 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9211 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9214 enum machine_mode ext_mode = mode;
9216 if (ext_mode == BLKmode
9217 && ! (target != 0 && MEM_P (op0)
9219 && bitpos % BITS_PER_UNIT == 0))
9220 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9222 if (ext_mode == BLKmode)
9225 target = assign_temp (type, 0, 1, 1);
9230 /* In this case, BITPOS must start at a byte boundary and
9231 TARGET, if specified, must be a MEM. */
9232 gcc_assert (MEM_P (op0)
9233 && (!target || MEM_P (target))
9234 && !(bitpos % BITS_PER_UNIT));
9236 emit_block_move (target,
9237 adjust_address (op0, VOIDmode,
9238 bitpos / BITS_PER_UNIT),
9239 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9241 (modifier == EXPAND_STACK_PARM
9242 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9247 op0 = validize_mem (op0);
9249 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9250 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9252 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
9253 (modifier == EXPAND_STACK_PARM
9254 ? NULL_RTX : target),
9255 ext_mode, ext_mode);
9257 /* If the result is a record type and BITSIZE is narrower than
9258 the mode of OP0, an integral mode, and this is a big endian
9259 machine, we must put the field into the high-order bits. */
9260 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9261 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9262 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9263 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9264 GET_MODE_BITSIZE (GET_MODE (op0))
9267 /* If the result type is BLKmode, store the data into a temporary
9268 of the appropriate type, but with the mode corresponding to the
9269 mode for the data we have (op0's mode). It's tempting to make
9270 this a constant type, since we know it's only being stored once,
9271 but that can cause problems if we are taking the address of this
9272 COMPONENT_REF because the MEM of any reference via that address
9273 will have flags corresponding to the type, which will not
9274 necessarily be constant. */
9275 if (mode == BLKmode)
9277 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9280 /* If the reference doesn't use the alias set of its type,
9281 we cannot create the temporary using that type. */
9282 if (component_uses_parent_alias_set (exp))
9284 new_rtx = assign_stack_local (ext_mode, size, 0);
9285 set_mem_alias_set (new_rtx, get_alias_set (exp));
9288 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9290 emit_move_insn (new_rtx, op0);
9291 op0 = copy_rtx (new_rtx);
9292 PUT_MODE (op0, BLKmode);
9293 set_mem_attributes (op0, exp, 1);
9299 /* If the result is BLKmode, use that to access the object
9301 if (mode == BLKmode)
9304 /* Get a reference to just this component. */
9305 if (modifier == EXPAND_CONST_ADDRESS
9306 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9307 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9309 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9311 if (op0 == orig_op0)
9312 op0 = copy_rtx (op0);
9314 set_mem_attributes (op0, exp, 0);
9315 if (REG_P (XEXP (op0, 0)))
9316 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9318 MEM_VOLATILE_P (op0) |= volatilep;
9319 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9320 || modifier == EXPAND_CONST_ADDRESS
9321 || modifier == EXPAND_INITIALIZER)
9323 else if (target == 0)
9324 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9326 convert_move (target, op0, unsignedp);
9331 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9334 /* All valid uses of __builtin_va_arg_pack () are removed during
9336 if (CALL_EXPR_VA_ARG_PACK (exp))
9337 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9339 tree fndecl = get_callee_fndecl (exp), attr;
9342 && (attr = lookup_attribute ("error",
9343 DECL_ATTRIBUTES (fndecl))) != NULL)
9344 error ("%Kcall to %qs declared with attribute error: %s",
9345 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9346 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9348 && (attr = lookup_attribute ("warning",
9349 DECL_ATTRIBUTES (fndecl))) != NULL)
9350 warning_at (tree_nonartificial_location (exp),
9351 0, "%Kcall to %qs declared with attribute warning: %s",
9352 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9353 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9355 /* Check for a built-in function. */
9356 if (fndecl && DECL_BUILT_IN (fndecl))
9358 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9359 return expand_builtin (exp, target, subtarget, tmode, ignore);
9362 return expand_call (exp, target, ignore);
9364 case VIEW_CONVERT_EXPR:
9367 /* If we are converting to BLKmode, try to avoid an intermediate
9368 temporary by fetching an inner memory reference. */
9370 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9371 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9372 && handled_component_p (treeop0))
9374 enum machine_mode mode1;
9375 HOST_WIDE_INT bitsize, bitpos;
9380 = get_inner_reference (treeop0, &bitsize, &bitpos,
9381 &offset, &mode1, &unsignedp, &volatilep,
9385 /* ??? We should work harder and deal with non-zero offsets. */
9387 && (bitpos % BITS_PER_UNIT) == 0
9389 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9391 /* See the normal_inner_ref case for the rationale. */
9394 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9395 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9397 && modifier != EXPAND_STACK_PARM
9398 ? target : NULL_RTX),
9400 (modifier == EXPAND_INITIALIZER
9401 || modifier == EXPAND_CONST_ADDRESS
9402 || modifier == EXPAND_STACK_PARM)
9403 ? modifier : EXPAND_NORMAL);
9405 if (MEM_P (orig_op0))
9409 /* Get a reference to just this component. */
9410 if (modifier == EXPAND_CONST_ADDRESS
9411 || modifier == EXPAND_SUM
9412 || modifier == EXPAND_INITIALIZER)
9413 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9415 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9417 if (op0 == orig_op0)
9418 op0 = copy_rtx (op0);
9420 set_mem_attributes (op0, treeop0, 0);
9421 if (REG_P (XEXP (op0, 0)))
9422 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9424 MEM_VOLATILE_P (op0) |= volatilep;
9430 op0 = expand_expr (treeop0,
9431 NULL_RTX, VOIDmode, modifier);
9433 /* If the input and output modes are both the same, we are done. */
9434 if (mode == GET_MODE (op0))
9436 /* If neither mode is BLKmode, and both modes are the same size
9437 then we can use gen_lowpart. */
9438 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9439 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9440 && !COMPLEX_MODE_P (GET_MODE (op0)))
9442 if (GET_CODE (op0) == SUBREG)
9443 op0 = force_reg (GET_MODE (op0), op0);
9444 temp = gen_lowpart_common (mode, op0);
9449 if (!REG_P (op0) && !MEM_P (op0))
9450 op0 = force_reg (GET_MODE (op0), op0);
9451 op0 = gen_lowpart (mode, op0);
9454 /* If both types are integral, convert from one mode to the other. */
9455 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9456 op0 = convert_modes (mode, GET_MODE (op0), op0,
9457 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9458 /* As a last resort, spill op0 to memory, and reload it in a
9460 else if (!MEM_P (op0))
9462 /* If the operand is not a MEM, force it into memory. Since we
9463 are going to be changing the mode of the MEM, don't call
9464 force_const_mem for constants because we don't allow pool
9465 constants to change mode. */
9466 tree inner_type = TREE_TYPE (treeop0);
9468 gcc_assert (!TREE_ADDRESSABLE (exp));
9470 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9472 = assign_stack_temp_for_type
9473 (TYPE_MODE (inner_type),
9474 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9476 emit_move_insn (target, op0);
9480 /* At this point, OP0 is in the correct mode. If the output type is
9481 such that the operand is known to be aligned, indicate that it is.
9482 Otherwise, we need only be concerned about alignment for non-BLKmode
9486 op0 = copy_rtx (op0);
9488 if (TYPE_ALIGN_OK (type))
9489 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9490 else if (STRICT_ALIGNMENT
9492 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9494 tree inner_type = TREE_TYPE (treeop0);
9495 HOST_WIDE_INT temp_size
9496 = MAX (int_size_in_bytes (inner_type),
9497 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9499 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9500 rtx new_with_op0_mode
9501 = adjust_address (new_rtx, GET_MODE (op0), 0);
9503 gcc_assert (!TREE_ADDRESSABLE (exp));
9505 if (GET_MODE (op0) == BLKmode)
9506 emit_block_move (new_with_op0_mode, op0,
9507 GEN_INT (GET_MODE_SIZE (mode)),
9508 (modifier == EXPAND_STACK_PARM
9509 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9511 emit_move_insn (new_with_op0_mode, op0);
9516 op0 = adjust_address (op0, mode, 0);
9521 /* Use a compare and a jump for BLKmode comparisons, or for function
9522 type comparisons is HAVE_canonicalize_funcptr_for_compare. */
9524 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9525 are occassionally created by folding during expansion. */
9526 case TRUTH_ANDIF_EXPR:
9527 case TRUTH_ORIF_EXPR:
9530 || modifier == EXPAND_STACK_PARM
9531 || ! safe_from_p (target, treeop0, 1)
9532 || ! safe_from_p (target, treeop1, 1)
9533 /* Make sure we don't have a hard reg (such as function's return
9534 value) live across basic blocks, if not optimizing. */
9535 || (!optimize && REG_P (target)
9536 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9537 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9540 emit_move_insn (target, const0_rtx);
9542 op1 = gen_label_rtx ();
9543 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9546 emit_move_insn (target, const1_rtx);
9549 return ignore ? const0_rtx : target;
9551 case STATEMENT_LIST:
9553 tree_stmt_iterator iter;
9555 gcc_assert (ignore);
9557 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9558 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9563 /* A COND_EXPR with its type being VOID_TYPE represents a
9564 conditional jump and is handled in
9565 expand_gimple_cond_expr. */
9566 gcc_assert (!VOID_TYPE_P (type));
9568 /* Note that COND_EXPRs whose type is a structure or union
9569 are required to be constructed to contain assignments of
9570 a temporary variable, so that we can evaluate them here
9571 for side effect only. If type is void, we must do likewise. */
9573 gcc_assert (!TREE_ADDRESSABLE (type)
9575 && TREE_TYPE (treeop1) != void_type_node
9576 && TREE_TYPE (treeop2) != void_type_node);
9578 /* If we are not to produce a result, we have no target. Otherwise,
9579 if a target was specified use it; it will not be used as an
9580 intermediate target unless it is safe. If no target, use a
9583 if (modifier != EXPAND_STACK_PARM
9585 && safe_from_p (original_target, treeop0, 1)
9586 && GET_MODE (original_target) == mode
9587 #ifdef HAVE_conditional_move
9588 && (! can_conditionally_move_p (mode)
9589 || REG_P (original_target))
9591 && !MEM_P (original_target))
9592 temp = original_target;
9594 temp = assign_temp (type, 0, 0, 1);
9596 do_pending_stack_adjust ();
9598 op0 = gen_label_rtx ();
9599 op1 = gen_label_rtx ();
9600 jumpifnot (treeop0, op0, -1);
9601 store_expr (treeop1, temp,
9602 modifier == EXPAND_STACK_PARM,
9605 emit_jump_insn (gen_jump (op1));
9608 store_expr (treeop2, temp,
9609 modifier == EXPAND_STACK_PARM,
9617 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9624 gcc_assert (ignore);
9626 /* Check for |= or &= of a bitfield of size one into another bitfield
9627 of size 1. In this case, (unless we need the result of the
9628 assignment) we can do this more efficiently with a
9629 test followed by an assignment, if necessary.
9631 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9632 things change so we do, this code should be enhanced to
9634 if (TREE_CODE (lhs) == COMPONENT_REF
9635 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9636 || TREE_CODE (rhs) == BIT_AND_EXPR)
9637 && TREE_OPERAND (rhs, 0) == lhs
9638 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9639 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9640 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9642 rtx label = gen_label_rtx ();
9643 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9644 do_jump (TREE_OPERAND (rhs, 1),
9646 value ? 0 : label, -1);
9647 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9648 MOVE_NONTEMPORAL (exp));
9649 do_pending_stack_adjust ();
9654 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9659 return expand_expr_addr_expr (exp, target, tmode, modifier);
9662 op0 = expand_normal (treeop0);
9663 return read_complex_part (op0, false);
9666 op0 = expand_normal (treeop0);
9667 return read_complex_part (op0, true);
9674 /* Expanded in cfgexpand.c. */
9677 case TRY_CATCH_EXPR:
9679 case EH_FILTER_EXPR:
9680 case TRY_FINALLY_EXPR:
9681 /* Lowered by tree-eh.c. */
9684 case WITH_CLEANUP_EXPR:
9685 case CLEANUP_POINT_EXPR:
9687 case CASE_LABEL_EXPR:
9693 case PREINCREMENT_EXPR:
9694 case PREDECREMENT_EXPR:
9695 case POSTINCREMENT_EXPR:
9696 case POSTDECREMENT_EXPR:
9699 /* Lowered by gimplify.c. */
9703 /* Function descriptors are not valid except for as
9704 initialization constants, and should not be expanded. */
9707 case WITH_SIZE_EXPR:
9708 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9709 have pulled out the size to use in whatever context it needed. */
9710 return expand_expr_real (treeop0, original_target, tmode,
9713 case COMPOUND_LITERAL_EXPR:
9715 /* Initialize the anonymous variable declared in the compound
9716 literal, then return the variable. */
9717 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9719 /* Create RTL for this variable. */
9720 if (!DECL_RTL_SET_P (decl))
9722 if (DECL_HARD_REGISTER (decl))
9723 /* The user specified an assembler name for this variable.
9725 rest_of_decl_compilation (decl, 0, 0);
9730 return expand_expr_real (decl, original_target, tmode,
9735 return expand_expr_real_2 (&ops, target, tmode, modifier);
9739 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9740 signedness of TYPE), possibly returning the result in TARGET. */
9742 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9744 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9745 if (target && GET_MODE (target) != GET_MODE (exp))
9747 /* For constant values, reduce using build_int_cst_type. */
9748 if (CONST_INT_P (exp))
9750 HOST_WIDE_INT value = INTVAL (exp);
9751 tree t = build_int_cst_type (type, value);
9752 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9754 else if (TYPE_UNSIGNED (type))
9756 rtx mask = immed_double_int_const (double_int_mask (prec),
9758 return expand_and (GET_MODE (exp), exp, mask, target);
9762 int count = GET_MODE_BITSIZE (GET_MODE (exp)) - prec;
9763 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
9764 exp, count, target, 0);
9765 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
9766 exp, count, target, 0);
9770 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9771 when applied to the address of EXP produces an address known to be
9772 aligned more than BIGGEST_ALIGNMENT. */
9775 is_aligning_offset (const_tree offset, const_tree exp)
9777 /* Strip off any conversions. */
9778 while (CONVERT_EXPR_P (offset))
9779 offset = TREE_OPERAND (offset, 0);
9781 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9782 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9783 if (TREE_CODE (offset) != BIT_AND_EXPR
9784 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9785 || compare_tree_int (TREE_OPERAND (offset, 1),
9786 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9787 || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9790 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9791 It must be NEGATE_EXPR. Then strip any more conversions. */
9792 offset = TREE_OPERAND (offset, 0);
9793 while (CONVERT_EXPR_P (offset))
9794 offset = TREE_OPERAND (offset, 0);
9796 if (TREE_CODE (offset) != NEGATE_EXPR)
9799 offset = TREE_OPERAND (offset, 0);
9800 while (CONVERT_EXPR_P (offset))
9801 offset = TREE_OPERAND (offset, 0);
9803 /* This must now be the address of EXP. */
9804 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9807 /* Return the tree node if an ARG corresponds to a string constant or zero
9808 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9809 in bytes within the string that ARG is accessing. The type of the
9810 offset will be `sizetype'. */
9813 string_constant (tree arg, tree *ptr_offset)
9815 tree array, offset, lower_bound;
9818 if (TREE_CODE (arg) == ADDR_EXPR)
9820 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9822 *ptr_offset = size_zero_node;
9823 return TREE_OPERAND (arg, 0);
9825 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9827 array = TREE_OPERAND (arg, 0);
9828 offset = size_zero_node;
9830 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9832 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9833 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9834 if (TREE_CODE (array) != STRING_CST
9835 && TREE_CODE (array) != VAR_DECL)
9838 /* Check if the array has a nonzero lower bound. */
9839 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9840 if (!integer_zerop (lower_bound))
9842 /* If the offset and base aren't both constants, return 0. */
9843 if (TREE_CODE (lower_bound) != INTEGER_CST)
9845 if (TREE_CODE (offset) != INTEGER_CST)
9847 /* Adjust offset by the lower bound. */
9848 offset = size_diffop (fold_convert (sizetype, offset),
9849 fold_convert (sizetype, lower_bound));
9855 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9857 tree arg0 = TREE_OPERAND (arg, 0);
9858 tree arg1 = TREE_OPERAND (arg, 1);
9863 if (TREE_CODE (arg0) == ADDR_EXPR
9864 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9865 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9867 array = TREE_OPERAND (arg0, 0);
9870 else if (TREE_CODE (arg1) == ADDR_EXPR
9871 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9872 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9874 array = TREE_OPERAND (arg1, 0);
9883 if (TREE_CODE (array) == STRING_CST)
9885 *ptr_offset = fold_convert (sizetype, offset);
9888 else if (TREE_CODE (array) == VAR_DECL
9889 || TREE_CODE (array) == CONST_DECL)
9893 /* Variables initialized to string literals can be handled too. */
9894 if (!const_value_known_p (array)
9895 || !DECL_INITIAL (array)
9896 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9899 /* Avoid const char foo[4] = "abcde"; */
9900 if (DECL_SIZE_UNIT (array) == NULL_TREE
9901 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9902 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9903 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9906 /* If variable is bigger than the string literal, OFFSET must be constant
9907 and inside of the bounds of the string literal. */
9908 offset = fold_convert (sizetype, offset);
9909 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9910 && (! host_integerp (offset, 1)
9911 || compare_tree_int (offset, length) >= 0))
9914 *ptr_offset = offset;
9915 return DECL_INITIAL (array);
9921 /* Generate code to calculate OPS, and exploded expression
9922 using a store-flag instruction and return an rtx for the result.
9923 OPS reflects a comparison.
9925 If TARGET is nonzero, store the result there if convenient.
9927 Return zero if there is no suitable set-flag instruction
9928 available on this machine.
9930 Once expand_expr has been called on the arguments of the comparison,
9931 we are committed to doing the store flag, since it is not safe to
9932 re-evaluate the expression. We emit the store-flag insn by calling
9933 emit_store_flag, but only expand the arguments if we have a reason
9934 to believe that emit_store_flag will be successful. If we think that
9935 it will, but it isn't, we have to simulate the store-flag with a
9936 set/jump/set sequence. */
9939 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9942 tree arg0, arg1, type;
9944 enum machine_mode operand_mode;
9947 rtx subtarget = target;
9948 location_t loc = ops->location;
9953 /* Don't crash if the comparison was erroneous. */
9954 if (arg0 == error_mark_node || arg1 == error_mark_node)
9957 type = TREE_TYPE (arg0);
9958 operand_mode = TYPE_MODE (type);
9959 unsignedp = TYPE_UNSIGNED (type);
9961 /* We won't bother with BLKmode store-flag operations because it would mean
9962 passing a lot of information to emit_store_flag. */
9963 if (operand_mode == BLKmode)
9966 /* We won't bother with store-flag operations involving function pointers
9967 when function pointers must be canonicalized before comparisons. */
9968 #ifdef HAVE_canonicalize_funcptr_for_compare
9969 if (HAVE_canonicalize_funcptr_for_compare
9970 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9971 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9973 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9974 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9975 == FUNCTION_TYPE))))
9982 /* Get the rtx comparison code to use. We know that EXP is a comparison
9983 operation of some type. Some comparisons against 1 and -1 can be
9984 converted to comparisons with zero. Do so here so that the tests
9985 below will be aware that we have a comparison with zero. These
9986 tests will not catch constants in the first operand, but constants
9987 are rarely passed as the first operand. */
9998 if (integer_onep (arg1))
9999 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10001 code = unsignedp ? LTU : LT;
10004 if (! unsignedp && integer_all_onesp (arg1))
10005 arg1 = integer_zero_node, code = LT;
10007 code = unsignedp ? LEU : LE;
10010 if (! unsignedp && integer_all_onesp (arg1))
10011 arg1 = integer_zero_node, code = GE;
10013 code = unsignedp ? GTU : GT;
10016 if (integer_onep (arg1))
10017 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10019 code = unsignedp ? GEU : GE;
10022 case UNORDERED_EXPR:
10048 gcc_unreachable ();
10051 /* Put a constant second. */
10052 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10053 || TREE_CODE (arg0) == FIXED_CST)
10055 tem = arg0; arg0 = arg1; arg1 = tem;
10056 code = swap_condition (code);
10059 /* If this is an equality or inequality test of a single bit, we can
10060 do this by shifting the bit being tested to the low-order bit and
10061 masking the result with the constant 1. If the condition was EQ,
10062 we xor it with 1. This does not require an scc insn and is faster
10063 than an scc insn even if we have it.
10065 The code to make this transformation was moved into fold_single_bit_test,
10066 so we just call into the folder and expand its result. */
10068 if ((code == NE || code == EQ)
10069 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10070 && integer_pow2p (TREE_OPERAND (arg0, 1))
10071 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10073 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10074 return expand_expr (fold_single_bit_test (loc,
10075 code == NE ? NE_EXPR : EQ_EXPR,
10077 target, VOIDmode, EXPAND_NORMAL);
10080 if (! get_subtarget (target)
10081 || GET_MODE (subtarget) != operand_mode)
10084 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10087 target = gen_reg_rtx (mode);
10089 /* Try a cstore if possible. */
10090 return emit_store_flag_force (target, code, op0, op1,
10091 operand_mode, unsignedp,
10092 (TYPE_PRECISION (ops->type) == 1
10093 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10097 /* Stubs in case we haven't got a casesi insn. */
10098 #ifndef HAVE_casesi
10099 # define HAVE_casesi 0
10100 # define gen_casesi(a, b, c, d, e) (0)
10101 # define CODE_FOR_casesi CODE_FOR_nothing
10104 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10105 0 otherwise (i.e. if there is no casesi instruction). */
10107 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10108 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10109 rtx fallback_label ATTRIBUTE_UNUSED)
10111 struct expand_operand ops[5];
10112 enum machine_mode index_mode = SImode;
10113 int index_bits = GET_MODE_BITSIZE (index_mode);
10114 rtx op1, op2, index;
10119 /* Convert the index to SImode. */
10120 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10122 enum machine_mode omode = TYPE_MODE (index_type);
10123 rtx rangertx = expand_normal (range);
10125 /* We must handle the endpoints in the original mode. */
10126 index_expr = build2 (MINUS_EXPR, index_type,
10127 index_expr, minval);
10128 minval = integer_zero_node;
10129 index = expand_normal (index_expr);
10131 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10132 omode, 1, default_label);
10133 /* Now we can safely truncate. */
10134 index = convert_to_mode (index_mode, index, 0);
10138 if (TYPE_MODE (index_type) != index_mode)
10140 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10141 index_expr = fold_convert (index_type, index_expr);
10144 index = expand_normal (index_expr);
10147 do_pending_stack_adjust ();
10149 op1 = expand_normal (minval);
10150 op2 = expand_normal (range);
10152 create_input_operand (&ops[0], index, index_mode);
10153 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10154 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10155 create_fixed_operand (&ops[3], table_label);
10156 create_fixed_operand (&ops[4], (default_label
10158 : fallback_label));
10159 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10163 /* Attempt to generate a tablejump instruction; same concept. */
10164 #ifndef HAVE_tablejump
10165 #define HAVE_tablejump 0
10166 #define gen_tablejump(x, y) (0)
10169 /* Subroutine of the next function.
10171 INDEX is the value being switched on, with the lowest value
10172 in the table already subtracted.
10173 MODE is its expected mode (needed if INDEX is constant).
10174 RANGE is the length of the jump table.
10175 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10177 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10178 index value is out of range. */
10181 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10186 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10187 cfun->cfg->max_jumptable_ents = INTVAL (range);
10189 /* Do an unsigned comparison (in the proper mode) between the index
10190 expression and the value which represents the length of the range.
10191 Since we just finished subtracting the lower bound of the range
10192 from the index expression, this comparison allows us to simultaneously
10193 check that the original index expression value is both greater than
10194 or equal to the minimum value of the range and less than or equal to
10195 the maximum value of the range. */
10198 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10201 /* If index is in range, it must fit in Pmode.
10202 Convert to Pmode so we can index with it. */
10204 index = convert_to_mode (Pmode, index, 1);
10206 /* Don't let a MEM slip through, because then INDEX that comes
10207 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10208 and break_out_memory_refs will go to work on it and mess it up. */
10209 #ifdef PIC_CASE_VECTOR_ADDRESS
10210 if (flag_pic && !REG_P (index))
10211 index = copy_to_mode_reg (Pmode, index);
10214 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10215 GET_MODE_SIZE, because this indicates how large insns are. The other
10216 uses should all be Pmode, because they are addresses. This code
10217 could fail if addresses and insns are not the same size. */
10218 index = gen_rtx_PLUS (Pmode,
10219 gen_rtx_MULT (Pmode, index,
10220 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10221 gen_rtx_LABEL_REF (Pmode, table_label));
10222 #ifdef PIC_CASE_VECTOR_ADDRESS
10224 index = PIC_CASE_VECTOR_ADDRESS (index);
10227 index = memory_address (CASE_VECTOR_MODE, index);
10228 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10229 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10230 convert_move (temp, vector, 0);
10232 emit_jump_insn (gen_tablejump (temp, table_label));
10234 /* If we are generating PIC code or if the table is PC-relative, the
10235 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10236 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10241 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10242 rtx table_label, rtx default_label)
10246 if (! HAVE_tablejump)
10249 index_expr = fold_build2 (MINUS_EXPR, index_type,
10250 fold_convert (index_type, index_expr),
10251 fold_convert (index_type, minval));
10252 index = expand_normal (index_expr);
10253 do_pending_stack_adjust ();
10255 do_tablejump (index, TYPE_MODE (index_type),
10256 convert_modes (TYPE_MODE (index_type),
10257 TYPE_MODE (TREE_TYPE (range)),
10258 expand_normal (range),
10259 TYPE_UNSIGNED (TREE_TYPE (range))),
10260 table_label, default_label);
10264 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10266 const_vector_from_tree (tree exp)
10271 enum machine_mode inner, mode;
10273 mode = TYPE_MODE (TREE_TYPE (exp));
10275 if (initializer_zerop (exp))
10276 return CONST0_RTX (mode);
10278 units = GET_MODE_NUNITS (mode);
10279 inner = GET_MODE_INNER (mode);
10281 v = rtvec_alloc (units);
10283 link = TREE_VECTOR_CST_ELTS (exp);
10284 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10286 elt = TREE_VALUE (link);
10288 if (TREE_CODE (elt) == REAL_CST)
10289 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10291 else if (TREE_CODE (elt) == FIXED_CST)
10292 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10295 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10299 /* Initialize remaining elements to 0. */
10300 for (; i < units; ++i)
10301 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10303 return gen_rtx_CONST_VECTOR (mode, v);
10306 /* Build a decl for a personality function given a language prefix. */
10309 build_personality_function (const char *lang)
10311 const char *unwind_and_version;
10315 switch (targetm_common.except_unwind_info (&global_options))
10320 unwind_and_version = "_sj0";
10324 unwind_and_version = "_v0";
10327 gcc_unreachable ();
10330 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
10332 type = build_function_type_list (integer_type_node, integer_type_node,
10333 long_long_unsigned_type_node,
10334 ptr_type_node, ptr_type_node, NULL_TREE);
10335 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10336 get_identifier (name), type);
10337 DECL_ARTIFICIAL (decl) = 1;
10338 DECL_EXTERNAL (decl) = 1;
10339 TREE_PUBLIC (decl) = 1;
10341 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10342 are the flags assigned by targetm.encode_section_info. */
10343 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10348 /* Extracts the personality function of DECL and returns the corresponding
10352 get_personality_function (tree decl)
10354 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10355 enum eh_personality_kind pk;
10357 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10358 if (pk == eh_personality_none)
10362 && pk == eh_personality_any)
10363 personality = lang_hooks.eh_personality ();
10365 if (pk == eh_personality_lang)
10366 gcc_assert (personality != NULL_TREE);
10368 return XEXP (DECL_RTL (personality), 0);
10371 #include "gt-expr.h"