/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "common/common-target.h"
#include "diagnostic.h"
#include "ssaexpand.h"
#include "target-globals.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
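
/* For example (an illustrative configuration, not target documentation):
   on a typical x86 setup STACK_GROWS_DOWNWARD is defined while
   ARGS_GROW_DOWNWARD is not, and push insns exist, so PUSH_ARGS_REVERSED
   is defined and arguments are processed from last to first.  */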
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
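
/* A worked example with illustrative numbers: if MOVE_MAX_PIECES is 8
   and MOVE_RATIO yields 5, a 16-byte copy with 64-bit alignment takes
   two DImode moves, and since 2 < 5 MOVE_BY_PIECES_P is true, so the
   copy is expanded inline rather than through a libcall.  */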
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat, mem, mem1, reg;
  enum machine_mode mode;
  int num_clobbers;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */
      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	       && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if (insn_operand_matches (ic, 1, mem))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      rtx libcall;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
						 from_mode),
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (reg_overlap_mentioned_p (to, from))
	    from = force_reg (from_mode, from);
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  int shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = (GET_MODE_BITSIZE (to_mode)
			  - GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
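
/* Sketch of a typical use of convert_move (illustrative): to sign-extend
   an SImode pseudo into a DImode pseudo,

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);

   A nonzero UNSIGNEDP would request ZERO_EXTEND instead.  */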
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
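
/* E.g. (illustrative) convert_to_mode (DImode, x, 1) yields a DImode rtx
   holding X zero-extended, either by reusing (part of) X in place or by
   emitting a conversion into a fresh pseudo.  */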
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = uhwi_to_double_int (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
	val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }
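
  /* Worked example (assuming a 64-bit HOST_WIDE_INT): converting
     (const_int -1) taken as unsigned DImode to a 128-bit integer mode
     must produce the value with a zero high word and an all-ones low
     word, not the all-ones high word gen_lowpart would create from the
     sign bit.  */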
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}
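
      /* E.g. (illustrative) with OLDMODE == QImode, VAL == 0x80 and a
	 signed conversion: WIDTH is 8, masking leaves 0x80, and since
	 the sign bit is set VAL is widened to -128 before gen_int_mode
	 canonicalizes it for MODE.  */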
      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > max_pieces
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
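
/* For instance (illustrative), with MAX_PIECES == 8 TMODE is DImode, so
   an incoming ALIGN of 128 bits is capped to GET_MODE_ALIGNMENT (DImode);
   on a strict-alignment target a smaller ALIGN instead ends the mode walk
   early and is only raised to the alignment of the widest mode that is
   still safe to access.  */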
/* Return the widest integer mode no wider than SIZE.  If no such mode
   can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
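
/* E.g. on a host with a 64-bit HOST_WIDE_INT this evaluates to
   MIN (MOVE_MAX_PIECES, 16), so the immediate-representability limit
   only bites when the target can move more than 16 bytes at a time.  */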
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode, from_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
	 MODE might not be used depending on the definitions of the
	 USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
	= widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode,
					     plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode,
					   plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
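
/* As the comment before move_by_pieces says, a mempcpy-style expansion
   (illustrative use) passes ENDP == 1 and uses the returned rtx, which
   addresses one byte past the last byte written; ENDP == 2 backs the
   address up by one for stpcpy-style uses.  */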
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
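
/* Worked example (illustrative): for L == 7 bytes with 32-bit alignment
   on a target where only SImode and narrower are usable at that
   alignment, the loop counts one SImode, one HImode and one QImode
   move, so the result is 3.  */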
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
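
/* E.g. (illustrative) a 16-byte aggregate assignment can be expanded with

     emit_block_move (target, source, GEN_INT (16), BLOCK_OP_NORMAL);

   leaving the alignment and size hints at their "unknown" values of 0
   and -1 as above.  */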
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far_v;
    cumulative_args_t args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
    args_so_far = pack_cumulative_args (&args_so_far_v);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
	{
	  struct expand_operand ops[6];
	  unsigned int nops;

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */
	  nops = insn_data[(int) code].n_generator_args;
	  gcc_assert (nops == 4 || nops == 6);

	  create_fixed_operand (&ops[0], x);
	  create_fixed_operand (&ops[1], y);
	  /* The check above guarantees that this size conversion is valid.  */
	  create_convert_operand_to (&ops[2], size, mode, true);
	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
	  if (nops == 6)
	    {
	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
	      create_integer_operand (&ops[5], expected_size);
	    }
	  if (maybe_expand_insn (code, nops, ops))
	    {
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
  enum machine_mode y_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
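
/* The emitted RTL implements this shape (pseudo-C sketch of the code
   above):

     iter = 0;
     goto cmp;
   top:
     *(x + iter) = *(y + iter);   (one QImode move)
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;  */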
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
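
/* E.g. (illustrative) for

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   the clone keeps both byte offsets but replaces the hard registers
   with two fresh DImode pseudos.  */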
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, false, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, false, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				shift, tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode),
					    0);
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode),
					    0);
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      shift, tmps[i], 0);
	    }
	  bytelen = adj_bytelen;
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
2077 /* Generate code to copy a BLKmode object of TYPE out of a
2078 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2079 is null, a stack temporary is created. TGTBLK is returned.
2081 The purpose of this routine is to handle functions that return
2082 BLKmode structures in registers. Some machines (the PA for example)
2083 want to return all small structures in registers regardless of the
2084 structure's alignment. */
2087 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2089 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2090 rtx src = NULL, dst = NULL;
2091 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2092 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2093 enum machine_mode copy_mode;
2097 tgtblk = assign_temp (build_qualified_type (type,
2099 | TYPE_QUAL_CONST)),
2101 preserve_temp_slots (tgtblk);
2104 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2105 into a new pseudo which is a full word. */
2107 if (GET_MODE (srcreg) != BLKmode
2108 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2109 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2111 /* If the structure doesn't take up a whole number of words, see whether
2112 SRCREG is padded on the left or on the right. If it's on the left,
2113 set PADDING_CORRECTION to the number of bits to skip.
2115 In most ABIs, the structure will be returned at the least significant end of
2116 the register, which translates to right padding on little-endian
2117 targets and left padding on big-endian targets. The opposite
2118 holds if the structure is returned at the most significant
2119 end of the register. */
2120 if (bytes % UNITS_PER_WORD != 0
2121 && (targetm.calls.return_in_msb (type)
2123 : BYTES_BIG_ENDIAN))
2125 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
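/* Worked example (illustrative): on a 32-bit big-endian target that
   returns the value at the least significant end of the register, a
   3-byte structure gives bytes % UNITS_PER_WORD == 3, hence
   padding_correction == 32 - 3 * 8 == 8 bits of left padding to skip. */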
2127 /* Copy the structure BITSIZE bits at a time. If the target lives in
2128 memory, take care of not reading/writing past its end by selecting
2129 a copy mode suited to BITSIZE. This should always be possible given
2130 that BITSIZE is the structure's alignment capped at BITS_PER_WORD.
2132 We could probably emit more efficient code for machines which do not use
2133 strict alignment, but it doesn't seem worth the effort at the current time. */
2136 copy_mode = word_mode;
2139 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2140 if (mem_mode != BLKmode)
2141 copy_mode = mem_mode;
2144 for (bitpos = 0, xbitpos = padding_correction;
2145 bitpos < bytes * BITS_PER_UNIT;
2146 bitpos += bitsize, xbitpos += bitsize)
2148 /* We need a new source operand each time xbitpos is on a
2149 word boundary and when xbitpos == padding_correction
2150 (the first time through). */
2151 if (xbitpos % BITS_PER_WORD == 0
2152 || xbitpos == padding_correction)
2153 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2156 /* We need a new destination operand each time bitpos is on a word boundary. */
2158 if (bitpos % BITS_PER_WORD == 0)
2159 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2161 /* Use xbitpos for the source extraction (right justified) and
2162 bitpos for the destination store (left justified). */
2163 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2164 extract_bit_field (src, bitsize,
2165 xbitpos % BITS_PER_WORD, 1, false,
2166 NULL_RTX, copy_mode, copy_mode));
2172 /* Add a USE expression for REG to the (possibly empty) list pointed
2173 to by CALL_FUSAGE. REG must denote a hard register. */
2176 use_reg (rtx *call_fusage, rtx reg)
2178 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2181 = gen_rtx_EXPR_LIST (VOIDmode,
2182 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2185 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2186 starting at REGNO. All of these registers must be hard registers. */
2189 use_regs (rtx *call_fusage, int regno, int nregs)
2193 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2195 for (i = 0; i < nregs; i++)
2196 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2199 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2200 PARALLEL REGS. This is for calls that pass values in multiple
2201 non-contiguous locations. The Irix 6 ABI has examples of this. */
2204 use_group_regs (rtx *call_fusage, rtx regs)
2208 for (i = 0; i < XVECLEN (regs, 0); i++)
2210 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2212 /* A NULL entry means the parameter goes both on the stack and in
2213 registers. This can also be a MEM for targets that pass values
2214 partially on the stack and partially in registers. */
2215 if (reg != 0 && REG_P (reg))
2216 use_reg (call_fusage, reg);
2220 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2221 assignment and the code of the expression on the RHS is CODE. Return NULL otherwise. */
2225 get_def_for_expr (tree name, enum tree_code code)
2229 if (TREE_CODE (name) != SSA_NAME)
2232 def_stmt = get_gimple_for_ssa_name (name);
2234 || gimple_assign_rhs_code (def_stmt) != code)
2241 /* Determine whether the LEN bytes generated by CONSTFUN can be
2242 stored to memory using several move instructions. CONSTFUNDATA is
2243 a pointer which will be passed as argument in every CONSTFUN call.
2244 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2245 a memset operation and false if it's a copy of a constant string.
2246 Return nonzero if a call to store_by_pieces should succeed. */
2249 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2250 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2251 void *constfundata, unsigned int align, bool memsetp)
2253 unsigned HOST_WIDE_INT l;
2254 unsigned int max_size;
2255 HOST_WIDE_INT offset = 0;
2256 enum machine_mode mode;
2257 enum insn_code icode;
2259 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2260 rtx cst ATTRIBUTE_UNUSED;
2266 ? SET_BY_PIECES_P (len, align)
2267 : STORE_BY_PIECES_P (len, align)))
2270 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2272 /* We would first store what we can in the largest integer mode, then go to
2273 successively smaller modes. */
2276 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2280 max_size = STORE_MAX_PIECES + 1;
2281 while (max_size > 1)
2283 mode = widest_int_mode_for_size (max_size);
2285 if (mode == VOIDmode)
2288 icode = optab_handler (mov_optab, mode);
2289 if (icode != CODE_FOR_nothing
2290 && align >= GET_MODE_ALIGNMENT (mode))
2292 unsigned int size = GET_MODE_SIZE (mode);
2299 cst = (*constfun) (constfundata, offset, mode);
2300 if (!targetm.legitimate_constant_p (mode, cst))
2310 max_size = GET_MODE_SIZE (mode);
2313 /* The code above should have handled everything. */
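/* A typical pairing, sketched here for illustration only (the reader
   callback name follows the builtins.c convention and is an assumption,
   not part of this file):

     if (can_store_by_pieces (len, builtin_memcpy_read_str,
                              (void *) str, align, false))
       store_by_pieces (dest_mem, len, builtin_memcpy_read_str,
                        (void *) str, align, false, 0);

   i.e. callers first ask whether the piecewise expansion is possible,
   then commit to it with the same CONSTFUN and CONSTFUNDATA. */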
2320 /* Generate several move instructions to store LEN bytes generated by
2321 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2322 pointer which will be passed as argument in every CONSTFUN call.
2323 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2324 a memset operation and false if it's a copy of a constant string.
2325 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2326 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2330 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2331 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2332 void *constfundata, unsigned int align, bool memsetp, int endp)
2334 enum machine_mode to_addr_mode
2335 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2336 struct store_by_pieces_d data;
2340 gcc_assert (endp != 2);
2345 ? SET_BY_PIECES_P (len, align)
2346 : STORE_BY_PIECES_P (len, align));
2347 data.constfun = constfun;
2348 data.constfundata = constfundata;
2351 store_by_pieces_1 (&data, align);
2356 gcc_assert (!data.reverse);
2361 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2362 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2364 data.to_addr = copy_to_mode_reg (to_addr_mode,
2365 plus_constant (data.to_addr,
2368 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2375 to1 = adjust_address (data.to, QImode, data.offset);
2383 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2384 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2387 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2389 struct store_by_pieces_d data;
2394 data.constfun = clear_by_pieces_1;
2395 data.constfundata = NULL;
2398 store_by_pieces_1 (&data, align);
2401 /* Callback routine for clear_by_pieces.
2402 Return const0_rtx unconditionally. */
2405 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2406 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2407 enum machine_mode mode ATTRIBUTE_UNUSED)
2412 /* Subroutine of clear_by_pieces and store_by_pieces.
2413 Generate several move instructions to store LEN bytes of block TO. (A MEM
2414 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2417 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2418 unsigned int align ATTRIBUTE_UNUSED)
2420 enum machine_mode to_addr_mode
2421 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2422 rtx to_addr = XEXP (data->to, 0);
2423 unsigned int max_size = STORE_MAX_PIECES + 1;
2424 enum insn_code icode;
2427 data->to_addr = to_addr;
2429 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2430 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2432 data->explicit_inc_to = 0;
2434 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2436 data->offset = data->len;
2438 /* If storing requires more than two move insns,
2439 copy addresses to registers (to make displacements shorter)
2440 and use post-increment if available. */
2441 if (!data->autinc_to
2442 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2444 /* Determine the main mode we'll be using.
2445 MODE might not be used depending on the definitions of the
2446 USE_* macros below. */
2447 enum machine_mode mode ATTRIBUTE_UNUSED
2448 = widest_int_mode_for_size (max_size);
2450 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2452 data->to_addr = copy_to_mode_reg (to_addr_mode,
2453 plus_constant (to_addr, data->len));
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = -1;
2458 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2459 && ! data->autinc_to)
2461 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2462 data->autinc_to = 1;
2463 data->explicit_inc_to = 1;
2466 if ( !data->autinc_to && CONSTANT_P (to_addr))
2467 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
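/* Conceptually (an illustrative sketch, not emitted literally), the
   pre-decrement path turns the copy loop below into

     to_addr = to + len;  ... *--to_addr = piece; ...

   while the post-increment path produces ... *to_addr++ = piece; ...
   via the explicit add insns emitted by store_by_pieces_2. */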
2470 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2472 /* First store what we can in the largest integer mode, then go to
2473 successively smaller modes. */
2475 while (max_size > 1)
2477 enum machine_mode mode = widest_int_mode_for_size (max_size);
2479 if (mode == VOIDmode)
2482 icode = optab_handler (mov_optab, mode);
2483 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2484 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2486 max_size = GET_MODE_SIZE (mode);
2489 /* The code above should have handled everything. */
2490 gcc_assert (!data->len);
2493 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2494 with move instructions for mode MODE. GENFUN is the gen_... function
2495 to make a move insn for that mode. DATA has all the other info. */
2498 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2499 struct store_by_pieces_d *data)
2501 unsigned int size = GET_MODE_SIZE (mode);
2504 while (data->len >= size)
2507 data->offset -= size;
2509 if (data->autinc_to)
2510 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2513 to1 = adjust_address (data->to, mode, data->offset);
2515 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2516 emit_insn (gen_add2_insn (data->to_addr,
2517 GEN_INT (-(HOST_WIDE_INT) size)));
2519 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2520 emit_insn ((*genfun) (to1, cst));
2522 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2523 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2525 if (! data->reverse)
2526 data->offset += size;
2532 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2533 its length in bytes. */
2536 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2537 unsigned int expected_align, HOST_WIDE_INT expected_size)
2539 enum machine_mode mode = GET_MODE (object);
2542 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2544 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2545 just move a zero. Otherwise, do this a piece at a time. */
2547 && CONST_INT_P (size)
2548 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2550 rtx zero = CONST0_RTX (mode);
2553 emit_move_insn (object, zero);
2557 if (COMPLEX_MODE_P (mode))
2559 zero = CONST0_RTX (GET_MODE_INNER (mode));
2562 write_complex_part (object, zero, 0);
2563 write_complex_part (object, zero, 1);
2569 if (size == const0_rtx)
2572 align = MEM_ALIGN (object);
2574 if (CONST_INT_P (size)
2575 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2576 clear_by_pieces (object, INTVAL (size), align);
2577 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2578 expected_align, expected_size))
2580 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2581 return set_storage_via_libcall (object, size, const0_rtx,
2582 method == BLOCK_OP_TAILCALL);
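/* Summary of the cascade above (illustrative): a register-mode object is
   cleared with a single zero move, a short constant length goes through
   clear_by_pieces (e.g. two DImode stores for an aligned 16-byte object
   on a 64-bit target), then a target setmem pattern is tried, and the
   generic address space finally falls back to a memset libcall. */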
2590 clear_storage (rtx object, rtx size, enum block_op_methods method)
2592 return clear_storage_hints (object, size, method, 0, -1);
2596 /* A subroutine of clear_storage. Expand a call to memset.
2597 Return the return value of memset, 0 otherwise. */
2600 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2602 tree call_expr, fn, object_tree, size_tree, val_tree;
2603 enum machine_mode size_mode;
2606 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2607 wrap those pseudos in tree nodes (via make_tree below) and use them later. */
2609 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2611 size_mode = TYPE_MODE (sizetype);
2612 size = convert_to_mode (size_mode, size, 1);
2613 size = copy_to_mode_reg (size_mode, size);
2615 /* It is incorrect to use the libcall calling conventions to call
2616 memset in this context. This could be a user call to memset and
2617 the user may wish to examine the return value from memset. For
2618 targets where libcalls and normal calls have different conventions
2619 for returning pointers, we could end up generating incorrect code. */
2621 object_tree = make_tree (ptr_type_node, object);
2622 if (!CONST_INT_P (val))
2623 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2624 size_tree = make_tree (sizetype, size);
2625 val_tree = make_tree (integer_type_node, val);
2627 fn = clear_storage_libcall_fn (true);
2628 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2629 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2631 retval = expand_normal (call_expr);
2636 /* A subroutine of set_storage_via_libcall. Create the tree node
2637 for the function we use for block clears. The first time FOR_CALL
2638 is true, we call assemble_external. */
2640 tree block_clear_fn;
2643 init_block_clear_fn (const char *asmspec)
2645 if (!block_clear_fn)
2649 fn = get_identifier ("memset");
2650 args = build_function_type_list (ptr_type_node, ptr_type_node,
2651 integer_type_node, sizetype,
2654 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
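/* I.e. the declaration built here corresponds to the C prototype
   void *memset (void *, int, size_t). */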
2655 DECL_EXTERNAL (fn) = 1;
2656 TREE_PUBLIC (fn) = 1;
2657 DECL_ARTIFICIAL (fn) = 1;
2658 TREE_NOTHROW (fn) = 1;
2659 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2660 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2662 block_clear_fn = fn;
2666 set_user_assembler_name (block_clear_fn, asmspec);
2670 clear_storage_libcall_fn (int for_call)
2672 static bool emitted_extern;
2674 if (!block_clear_fn)
2675 init_block_clear_fn (NULL);
2677 if (for_call && !emitted_extern)
2679 emitted_extern = true;
2680 make_decl_rtl (block_clear_fn);
2681 assemble_external (block_clear_fn);
2684 return block_clear_fn;
2687 /* Expand a setmem pattern; return true if successful. */
2690 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2691 unsigned int expected_align, HOST_WIDE_INT expected_size)
2693 /* Try the most limited insn first, because there's no point
2694 including more than one in the machine description unless
2695 the more limited one has some advantage. */
2697 enum machine_mode mode;
2699 if (expected_align < align)
2700 expected_align = align;
2702 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2703 mode = GET_MODE_WIDER_MODE (mode))
2705 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2707 if (code != CODE_FOR_nothing
2708 /* We don't need MODE to be narrower than
2709 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2710 the mode mask, as it is returned by the macro, it will
2711 definitely be less than the actual mode mask. */
2712 && ((CONST_INT_P (size)
2713 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2714 <= (GET_MODE_MASK (mode) >> 1)))
2715 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2717 struct expand_operand ops[6];
2720 nops = insn_data[(int) code].n_generator_args;
2721 gcc_assert (nops == 4 || nops == 6);
2723 create_fixed_operand (&ops[0], object);
2724 /* The check above guarantees that this size conversion is valid. */
2725 create_convert_operand_to (&ops[1], size, mode, true);
2726 create_convert_operand_from (&ops[2], val, byte_mode, true);
2727 create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2730 create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2731 create_integer_operand (&ops[5], expected_size);
2733 if (maybe_expand_insn (code, nops, ops))
2742 /* Write to one of the components of the complex value CPLX. Write VAL to
2743 the real part if IMAG_P is false, and the imaginary part if it's true. */
2746 write_complex_part (rtx cplx, rtx val, bool imag_p)
2748 enum machine_mode cmode;
2749 enum machine_mode imode;
2752 if (GET_CODE (cplx) == CONCAT)
2754 emit_move_insn (XEXP (cplx, imag_p), val);
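/* A CONCAT keeps the real part as operand 0 and the imaginary part as
   operand 1, so IMAG_P indexes the pair directly. */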
2758 cmode = GET_MODE (cplx);
2759 imode = GET_MODE_INNER (cmode);
2760 ibitsize = GET_MODE_BITSIZE (imode);
2762 /* For MEMs simplify_gen_subreg may generate an invalid new address
2763 because, e.g., the original address is considered mode-dependent
2764 by the target, which restricts simplify_subreg from invoking
2765 adjust_address_nv. Instead of preparing fallback support for an
2766 invalid address, we call adjust_address_nv directly. */
2769 emit_move_insn (adjust_address_nv (cplx, imode,
2770 imag_p ? GET_MODE_SIZE (imode) : 0),
2775 /* If the sub-object is at least word sized, then we know that subregging
2776 will work. This special case is important, since store_bit_field
2777 wants to operate on integer modes, and there's rarely an OImode to
2778 correspond to TCmode. */
2779 if (ibitsize >= BITS_PER_WORD
2780 /* For hard regs we have exact predicates. Assume we can split
2781 the original object if it spans an even number of hard regs.
2782 This special case is important for SCmode on 64-bit platforms
2783 where the natural size of floating-point regs is 32-bit. */
2785 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2786 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2788 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2789 imag_p ? GET_MODE_SIZE (imode) : 0);
2792 emit_move_insn (part, val);
2796 /* simplify_gen_subreg may fail for sub-word MEMs. */
2797 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2800 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2803 /* Extract one of the components of the complex value CPLX. Extract the
2804 real part if IMAG_P is false, and the imaginary part if it's true. */
2807 read_complex_part (rtx cplx, bool imag_p)
2809 enum machine_mode cmode, imode;
2812 if (GET_CODE (cplx) == CONCAT)
2813 return XEXP (cplx, imag_p);
2815 cmode = GET_MODE (cplx);
2816 imode = GET_MODE_INNER (cmode);
2817 ibitsize = GET_MODE_BITSIZE (imode);
2819 /* Special case reads from complex constants that got spilled to memory. */
2820 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2822 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2823 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2825 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2826 if (CONSTANT_CLASS_P (part))
2827 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
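/* E.g. (illustrative): reading the imaginary part of a spilled complex
   constant such as 1.0 + 2.0i yields an rtx for the constant 2.0
   directly instead of reading it back out of the spilled object. */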
2831 /* For MEMs simplify_gen_subreg may generate an invalid new address
2832 because, e.g., the original address is considered mode-dependent
2833 by the target, which restricts simplify_subreg from invoking
2834 adjust_address_nv. Instead of preparing fallback support for an
2835 invalid address, we call adjust_address_nv directly. */
2837 return adjust_address_nv (cplx, imode,
2838 imag_p ? GET_MODE_SIZE (imode) : 0);
2840 /* If the sub-object is at least word sized, then we know that subregging
2841 will work. This special case is important, since extract_bit_field
2842 wants to operate on integer modes, and there's rarely an OImode to
2843 correspond to TCmode. */
2844 if (ibitsize >= BITS_PER_WORD
2845 /* For hard regs we have exact predicates. Assume we can split
2846 the original object if it spans an even number of hard regs.
2847 This special case is important for SCmode on 64-bit platforms
2848 where the natural size of floating-point regs is 32-bit. */
2850 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2851 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2853 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2854 imag_p ? GET_MODE_SIZE (imode) : 0);
2858 /* simplify_gen_subreg may fail for sub-word MEMs. */
2859 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2862 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2863 true, false, NULL_RTX, imode, imode);
2866 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2867 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2868 represented in NEW_MODE. If FORCE is true, this will never happen, as
2869 we'll force-create a SUBREG if needed. */
2872 emit_move_change_mode (enum machine_mode new_mode,
2873 enum machine_mode old_mode, rtx x, bool force)
2877 if (push_operand (x, GET_MODE (x)))
2879 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2880 MEM_COPY_ATTRIBUTES (ret, x);
2884 /* We don't have to worry about changing the address since the
2885 size in bytes is supposed to be the same. */
2886 if (reload_in_progress)
2888 /* Copy the MEM to change the mode and move any
2889 substitutions from the old MEM to the new one. */
2890 ret = adjust_address_nv (x, new_mode, 0);
2891 copy_replacements (x, ret);
2894 ret = adjust_address (x, new_mode, 0);
2898 /* Note that we do want simplify_subreg's behavior of validating
2899 that the new mode is ok for a hard register. If we were to use
2900 simplify_gen_subreg, we would create the subreg, but would
2901 probably run into the target not being able to implement it. */
2902 /* Except, of course, when FORCE is true, when this is exactly what
2903 we want. Which is needed for CCmodes on some targets. */
2905 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2907 ret = simplify_subreg (new_mode, x, old_mode, 0);
2913 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2914 an integer mode of the same size as MODE. Returns the instruction
2915 emitted, or NULL if such a move could not be generated. */
2918 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2920 enum machine_mode imode;
2921 enum insn_code code;
2923 /* There must exist a mode of the exact size we require. */
2924 imode = int_mode_for_mode (mode);
2925 if (imode == BLKmode)
2928 /* The target must support moves in this mode. */
2929 code = optab_handler (mov_optab, imode);
2930 if (code == CODE_FOR_nothing)
2933 x = emit_move_change_mode (imode, mode, x, force);
2936 y = emit_move_change_mode (imode, mode, y, force);
2939 return emit_insn (GEN_FCN (code) (x, y));
2942 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2943 Return an equivalent MEM that does not use an auto-increment. */
2946 emit_move_resolve_push (enum machine_mode mode, rtx x)
2948 enum rtx_code code = GET_CODE (XEXP (x, 0));
2949 HOST_WIDE_INT adjust;
2952 adjust = GET_MODE_SIZE (mode);
2953 #ifdef PUSH_ROUNDING
2954 adjust = PUSH_ROUNDING (adjust);
2956 if (code == PRE_DEC || code == POST_DEC)
2958 else if (code == PRE_MODIFY || code == POST_MODIFY)
2960 rtx expr = XEXP (XEXP (x, 0), 1);
2963 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2964 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
2965 val = INTVAL (XEXP (expr, 1));
2966 if (GET_CODE (expr) == MINUS)
2968 gcc_assert (adjust == val || adjust == -val);
2972 /* Do not use anti_adjust_stack, since we don't want to update
2973 stack_pointer_delta. */
2974 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2975 GEN_INT (adjust), stack_pointer_rtx,
2976 0, OPTAB_LIB_WIDEN);
2977 if (temp != stack_pointer_rtx)
2978 emit_move_insn (stack_pointer_rtx, temp);
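/* E.g. (illustrative): a DImode push through (pre_dec sp) on a 32-bit
   target adjusts sp by 8 bytes; the code above materializes that side
   effect as an explicit stack-pointer adjustment, so the MEM returned
   below needs no auto-modification. */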
2985 temp = stack_pointer_rtx;
2990 temp = plus_constant (stack_pointer_rtx, -adjust);
2996 return replace_equiv_address (x, temp);
2999 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3000 X is known to satisfy push_operand, and MODE is known to be complex.
3001 Returns the last instruction emitted. */
3004 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3006 enum machine_mode submode = GET_MODE_INNER (mode);
3009 #ifdef PUSH_ROUNDING
3010 unsigned int submodesize = GET_MODE_SIZE (submode);
3012 /* In case we output to the stack, but the size is smaller than the
3013 machine can push exactly, we need to use move instructions. */
3014 if (PUSH_ROUNDING (submodesize) != submodesize)
3016 x = emit_move_resolve_push (mode, x);
3017 return emit_move_insn (x, y);
3021 /* Note that the real part always precedes the imag part in memory
3022 regardless of the machine's endianness. */
3023 switch (GET_CODE (XEXP (x, 0)))
3037 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3038 read_complex_part (y, imag_first));
3039 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3040 read_complex_part (y, !imag_first));
3043 /* A subroutine of emit_move_complex. Perform the move from Y to X
3044 via two moves of the parts. Returns the last instruction emitted. */
3047 emit_move_complex_parts (rtx x, rtx y)
3049 /* Show the output dies here. This is necessary for SUBREGs
3050 of pseudos since we cannot track their lifetimes correctly;
3051 hard regs shouldn't appear here except as return values. */
3052 if (!reload_completed && !reload_in_progress
3053 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3056 write_complex_part (x, read_complex_part (y, false), false);
3057 write_complex_part (x, read_complex_part (y, true), true);
3059 return get_last_insn ();
3062 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3063 MODE is known to be complex. Returns the last instruction emitted. */
3066 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3070 /* Need to take special care for pushes, to maintain proper ordering
3071 of the data, and possibly extra padding. */
3072 if (push_operand (x, mode))
3073 return emit_move_complex_push (mode, x, y);
3075 /* See if we can coerce the target into moving both values at once. */
3077 /* Move floating point as parts. */
3078 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3079 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3081 /* Not possible if the values are inherently not adjacent. */
3082 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3084 /* Is possible if both are registers (or subregs of registers). */
3085 else if (register_operand (x, mode) && register_operand (y, mode))
3087 /* If one of the operands is a memory, and alignment constraints
3088 are friendly enough, we may be able to do combined memory operations.
3089 We do not attempt this if Y is a constant because that combination is
3090 usually better with the by-parts thing below. */
3091 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3092 && (!STRICT_ALIGNMENT
3093 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3102 /* For memory to memory moves, optimal behavior can be had with the
3103 existing block move logic. */
3104 if (MEM_P (x) && MEM_P (y))
3106 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3107 BLOCK_OP_NO_LIBCALL);
3108 return get_last_insn ();
3111 ret = emit_move_via_integer (mode, x, y, true);
3116 return emit_move_complex_parts (x, y);
3119 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3120 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3123 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3127 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3130 enum insn_code code = optab_handler (mov_optab, CCmode);
3131 if (code != CODE_FOR_nothing)
3133 x = emit_move_change_mode (CCmode, mode, x, true);
3134 y = emit_move_change_mode (CCmode, mode, y, true);
3135 return emit_insn (GEN_FCN (code) (x, y));
3139 /* Otherwise, find the MODE_INT mode of the same width. */
3140 ret = emit_move_via_integer (mode, x, y, false);
3141 gcc_assert (ret != NULL);
3145 /* Return true if word I of OP lies entirely in the
3146 undefined bits of a paradoxical subreg. */
3149 undefined_operand_subword_p (const_rtx op, int i)
3151 enum machine_mode innermode, innermostmode;
3153 if (GET_CODE (op) != SUBREG)
3155 innermode = GET_MODE (op);
3156 innermostmode = GET_MODE (SUBREG_REG (op));
3157 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3158 /* The SUBREG_BYTE represents offset, as if the value were stored in
3159 memory, except for a paradoxical subreg where we define
3160 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3162 if (SUBREG_BYTE (op) == 0
3163 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3165 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3166 if (WORDS_BIG_ENDIAN)
3167 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3168 if (BYTES_BIG_ENDIAN)
3169 offset += difference % UNITS_PER_WORD;
3171 if (offset >= GET_MODE_SIZE (innermostmode)
3172 || offset <= -GET_MODE_SIZE (word_mode))
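/* E.g. (illustrative): for (subreg:TI (reg:DI x) 0) on a little-endian
   64-bit target, word 1 of the TImode value lies wholly outside the
   DImode source, so that word is undefined and no move need be emitted
   for it. */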
3177 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3178 MODE is any multi-word or full-word mode that lacks a move_insn
3179 pattern. Note that you will get better code if you define such
3180 patterns, even if they must turn into multiple assembler instructions. */
3183 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3190 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3192 /* If X is a push on the stack, do the push now and replace
3193 X with a reference to the stack pointer. */
3194 if (push_operand (x, mode))
3195 x = emit_move_resolve_push (mode, x);
3197 /* If we are in reload, see if either operand is a MEM whose address
3198 is scheduled for replacement. */
3199 if (reload_in_progress && MEM_P (x)
3200 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3201 x = replace_equiv_address_nv (x, inner);
3202 if (reload_in_progress && MEM_P (y)
3203 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3204 y = replace_equiv_address_nv (y, inner);
3208 need_clobber = false;
3210 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3213 rtx xpart = operand_subword (x, i, 1, mode);
3216 /* Do not generate code for a move if it would come entirely
3217 from the undefined bits of a paradoxical subreg. */
3218 if (undefined_operand_subword_p (y, i))
3221 ypart = operand_subword (y, i, 1, mode);
3223 /* If we can't get a part of Y, put Y into memory if it is a
3224 constant. Otherwise, force it into a register. Then we must
3225 be able to get a part of Y. */
3226 if (ypart == 0 && CONSTANT_P (y))
3228 y = use_anchored_address (force_const_mem (mode, y));
3229 ypart = operand_subword (y, i, 1, mode);
3231 else if (ypart == 0)
3232 ypart = operand_subword_force (y, i, mode);
3234 gcc_assert (xpart && ypart);
3236 need_clobber |= (GET_CODE (xpart) == SUBREG);
3238 last_insn = emit_move_insn (xpart, ypart);
3244 /* Show the output dies here. This is necessary for SUBREGs
3245 of pseudos since we cannot track their lifetimes correctly;
3246 hard regs shouldn't appear here except as return values.
3247 We never want to emit such a clobber after reload. */
3249 && ! (reload_in_progress || reload_completed)
3250 && need_clobber != 0)
3258 /* Low level part of emit_move_insn.
3259 Called just like emit_move_insn, but assumes X and Y
3260 are basically valid. */
3263 emit_move_insn_1 (rtx x, rtx y)
3265 enum machine_mode mode = GET_MODE (x);
3266 enum insn_code code;
3268 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3270 code = optab_handler (mov_optab, mode);
3271 if (code != CODE_FOR_nothing)
3272 return emit_insn (GEN_FCN (code) (x, y));
3274 /* Expand complex moves by moving real part and imag part. */
3275 if (COMPLEX_MODE_P (mode))
3276 return emit_move_complex (mode, x, y);
3278 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3279 || ALL_FIXED_POINT_MODE_P (mode))
3281 rtx result = emit_move_via_integer (mode, x, y, true);
3283 /* If we can't find an integer mode, use multi words. */
3287 return emit_move_multi_word (mode, x, y);
3290 if (GET_MODE_CLASS (mode) == MODE_CC)
3291 return emit_move_ccmode (mode, x, y);
3293 /* Try using a move pattern for the corresponding integer mode. This is
3294 only safe when simplify_subreg can convert MODE constants into integer
3295 constants. At present, it can only do this reliably if the value
3296 fits within a HOST_WIDE_INT. */
3297 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3299 rtx ret = emit_move_via_integer (mode, x, y, false);
3304 return emit_move_multi_word (mode, x, y);
3307 /* Generate code to copy Y into X.
3308 Both Y and X must have the same mode, except that
3309 Y can be a constant with VOIDmode.
3310 This mode cannot be BLKmode; use emit_block_move for that.
3312 Return the last instruction emitted. */
3315 emit_move_insn (rtx x, rtx y)
3317 enum machine_mode mode = GET_MODE (x);
3318 rtx y_cst = NULL_RTX;
3321 gcc_assert (mode != BLKmode
3322 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3327 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3328 && (last_insn = compress_float_constant (x, y)))
3333 if (!targetm.legitimate_constant_p (mode, y))
3335 y = force_const_mem (mode, y);
3337 /* If the target's cannot_force_const_mem prevented the spill,
3338 assume that the target's move expanders will also take care
3339 of the non-legitimate constant. */
3343 y = use_anchored_address (y);
3347 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3350 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3352 && ! push_operand (x, GET_MODE (x))))
3353 x = validize_mem (x);
3356 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3357 MEM_ADDR_SPACE (y)))
3358 y = validize_mem (y);
3360 gcc_assert (mode != BLKmode);
3362 last_insn = emit_move_insn_1 (x, y);
3364 if (y_cst && REG_P (x)
3365 && (set = single_set (last_insn)) != NULL_RTX
3366 && SET_DEST (set) == x
3367 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3368 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3373 /* If Y is representable exactly in a narrower mode, and the target can
3374 perform the extension directly from constant or memory, then emit the
3375 move as an extension. */
3378 compress_float_constant (rtx x, rtx y)
3380 enum machine_mode dstmode = GET_MODE (x);
3381 enum machine_mode orig_srcmode = GET_MODE (y);
3382 enum machine_mode srcmode;
3384 int oldcost, newcost;
3385 bool speed = optimize_insn_for_speed_p ();
3387 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3389 if (targetm.legitimate_constant_p (dstmode, y))
3390 oldcost = rtx_cost (y, SET, speed);
3392 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3394 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3395 srcmode != orig_srcmode;
3396 srcmode = GET_MODE_WIDER_MODE (srcmode))
3399 rtx trunc_y, last_insn;
3401 /* Skip if the target can't extend this way. */
3402 ic = can_extend_p (dstmode, srcmode, 0);
3403 if (ic == CODE_FOR_nothing)
3406 /* Skip if the narrowed value isn't exact. */
3407 if (! exact_real_truncate (srcmode, &r))
3410 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3412 if (targetm.legitimate_constant_p (srcmode, trunc_y))
3414 /* Skip if the target needs extra instructions to perform the conversion. */
3416 if (!insn_operand_matches (ic, 1, trunc_y))
3418 /* This is valid, but may not be cheaper than the original. */
3419 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3420 if (oldcost < newcost)
3423 else if (float_extend_from_mem[dstmode][srcmode])
3425 trunc_y = force_const_mem (srcmode, trunc_y);
3426 /* This is valid, but may not be cheaper than the original. */
3427 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3428 if (oldcost < newcost)
3430 trunc_y = validize_mem (trunc_y);
3435 /* For CSE's benefit, force the compressed constant pool entry
3436 into a new pseudo. This constant may be used in different modes,
3437 and if not, combine will put things back together for us. */
3438 trunc_y = force_reg (srcmode, trunc_y);
3439 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3440 last_insn = get_last_insn ();
3443 set_unique_reg_note (last_insn, REG_EQUAL, y);
3451 /* Pushing data onto the stack. */
3453 /* Push a block of length SIZE (perhaps variable)
3454 and return an rtx to address the beginning of the block.
3455 The value may be virtual_outgoing_args_rtx.
3457 EXTRA is the number of bytes of padding to push in addition to SIZE.
3458 BELOW nonzero means this padding comes at low addresses;
3459 otherwise, the padding comes at high addresses. */
3462 push_block (rtx size, int extra, int below)
3466 size = convert_modes (Pmode, ptr_mode, size, 1);
3467 if (CONSTANT_P (size))
3468 anti_adjust_stack (plus_constant (size, extra));
3469 else if (REG_P (size) && extra == 0)
3470 anti_adjust_stack (size);
3473 temp = copy_to_mode_reg (Pmode, size);
3475 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3476 temp, 0, OPTAB_LIB_WIDEN);
3477 anti_adjust_stack (temp);
3480 #ifndef STACK_GROWS_DOWNWARD
3486 temp = virtual_outgoing_args_rtx;
3487 if (extra != 0 && below)
3488 temp = plus_constant (temp, extra);
3492 if (CONST_INT_P (size))
3493 temp = plus_constant (virtual_outgoing_args_rtx,
3494 -INTVAL (size) - (below ? 0 : extra));
3495 else if (extra != 0 && !below)
3496 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3497 negate_rtx (Pmode, plus_constant (size, extra)));
3499 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3500 negate_rtx (Pmode, size));
3503 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3506 #ifdef PUSH_ROUNDING
3508 /* Emit single push insn. */
3511 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3514 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3516 enum insn_code icode;
3518 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3519 /* If there is a push pattern, use it. Otherwise try the old way of throwing a
3520 MEM representing the push operation to the move expander. */
3521 icode = optab_handler (push_optab, mode);
3522 if (icode != CODE_FOR_nothing)
3524 struct expand_operand ops[1];
3526 create_input_operand (&ops[0], x, mode);
3527 if (maybe_expand_insn (icode, 1, ops))
3530 if (GET_MODE_SIZE (mode) == rounded_size)
3531 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
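/* E.g. (illustrative): with STACK_PUSH_CODE == PRE_DEC this builds a
   (pre_dec sp) address, so the MEM created below is the classic push
   operand that the target's move expander can match with a real push
   instruction. */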
3532 /* If we are to pad downward, adjust the stack pointer first and
3533 then store X into the stack location using an offset. This is
3534 because emit_move_insn does not know how to pad; it does not have access to type. */
3536 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3538 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3539 HOST_WIDE_INT offset;
3541 emit_move_insn (stack_pointer_rtx,
3542 expand_binop (Pmode,
3543 #ifdef STACK_GROWS_DOWNWARD
3549 GEN_INT (rounded_size),
3550 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3552 offset = (HOST_WIDE_INT) padding_size;
3553 #ifdef STACK_GROWS_DOWNWARD
3554 if (STACK_PUSH_CODE == POST_DEC)
3555 /* We have already decremented the stack pointer, so get the previous value. */
3557 offset += (HOST_WIDE_INT) rounded_size;
3559 if (STACK_PUSH_CODE == POST_INC)
3560 /* We have already incremented the stack pointer, so get the previous value. */
3562 offset -= (HOST_WIDE_INT) rounded_size;
3564 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3568 #ifdef STACK_GROWS_DOWNWARD
3569 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3570 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3571 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3573 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3574 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3575 GEN_INT (rounded_size));
3577 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3580 dest = gen_rtx_MEM (mode, dest_addr);
3584 set_mem_attributes (dest, type, 1);
3586 if (flag_optimize_sibling_calls)
3587 /* Function incoming arguments may overlap with sibling call
3588 outgoing arguments and we cannot allow reordering of reads
3589 from function arguments with stores to outgoing arguments
3590 of sibling calls. */
3591 set_mem_alias_set (dest, 0);
3593 emit_move_insn (dest, x);
3597 /* Generate code to push X onto the stack, assuming it has mode MODE and
3598 type TYPE.
3599 MODE is redundant except when X is a CONST_INT (since they don't
3600 carry mode info).
3601 SIZE is an rtx for the size of data to be copied (in bytes),
3602 needed only if X is BLKmode.
3604 ALIGN (in bits) is maximum alignment we can assume.
3606 If PARTIAL and REG are both nonzero, then copy that many of the first
3607 bytes of X into registers starting with REG, and push the rest of X.
3608 The amount of space pushed is decreased by PARTIAL bytes.
3609 REG must be a hard register in this case.
3610 If REG is zero but PARTIAL is not, take all other actions for an
3611 argument partially in registers, but do not actually load any registers.
3614 EXTRA is the amount in bytes of extra space to leave next to this arg.
3615 This is ignored if an argument block has already been allocated.
3617 On a machine that lacks real push insns, ARGS_ADDR is the address of
3618 the bottom of the argument block for this call. We use indexing off there
3619 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3620 argument block has not been preallocated.
3622 ARGS_SO_FAR is the size of args previously pushed for this call.
3624 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3625 for arguments passed in registers. If nonzero, it will be the number
3626 of bytes required. */
3629 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3630 unsigned int align, int partial, rtx reg, int extra,
3631 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3635 enum direction stack_direction
3636 #ifdef STACK_GROWS_DOWNWARD
3642 /* Decide where to pad the argument: `downward' for below,
3643 `upward' for above, or `none' for don't pad it.
3644 Default is below for small data on big-endian machines; else above. */
3645 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3647 /* Invert direction if stack is post-decrement. */
3649 if (STACK_PUSH_CODE == POST_DEC)
3650 if (where_pad != none)
3651 where_pad = (where_pad == downward ? upward : downward);
3656 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3658 /* Copy a block into the stack, entirely or partially. */
3665 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3666 used = partial - offset;
3668 if (mode != BLKmode)
3670 /* A value is to be stored in an insufficiently aligned
3671 stack slot; copy via a suitably aligned slot if necessary. */
3673 size = GEN_INT (GET_MODE_SIZE (mode));
3674 if (!MEM_P (xinner))
3676 temp = assign_temp (type, 0, 1, 1);
3677 emit_move_insn (temp, xinner);
3684 /* USED is now the # of bytes we need not copy to the stack
3685 because registers will take care of them. */
3688 xinner = adjust_address (xinner, BLKmode, used);
3690 /* If the partial register-part of the arg counts in its stack size,
3691 skip the part of stack space corresponding to the registers.
3692 Otherwise, start copying to the beginning of the stack space,
3693 by setting SKIP to 0. */
3694 skip = (reg_parm_stack_space == 0) ? 0 : used;
3696 #ifdef PUSH_ROUNDING
3697 /* Do it with several push insns if that doesn't take lots of insns
3698 and if there is no difficulty with push insns that skip bytes
3699 on the stack for alignment purposes. */
3702 && CONST_INT_P (size)
3704 && MEM_ALIGN (xinner) >= align
3705 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3706 /* Here we avoid the case of a structure whose weak alignment
3707 forces many pushes of a small amount of data,
3708 and such small pushes do rounding that causes trouble. */
3709 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3710 || align >= BIGGEST_ALIGNMENT
3711 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3712 == (align / BITS_PER_UNIT)))
3713 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3715 /* Push padding now if padding above and stack grows down,
3716 or if padding below and stack grows up.
3717 But if space already allocated, this has already been done. */
3718 if (extra && args_addr == 0
3719 && where_pad != none && where_pad != stack_direction)
3720 anti_adjust_stack (GEN_INT (extra));
3722 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
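/* A null destination asks move_by_pieces to emit push insns; e.g.
   (illustrative) a 12-byte word-aligned block on a 32-bit target
   becomes three word-mode pushes. */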
3725 #endif /* PUSH_ROUNDING */
3729 /* Otherwise make space on the stack and copy the data
3730 to the address of that space. */
3732 /* Deduct words put into registers from the size we must copy. */
3735 if (CONST_INT_P (size))
3736 size = GEN_INT (INTVAL (size) - used);
3738 size = expand_binop (GET_MODE (size), sub_optab, size,
3739 GEN_INT (used), NULL_RTX, 0,
3743 /* Get the address of the stack space.
3744 In this case, we do not deal with EXTRA separately.
3745 A single stack adjust will do. */
3748 temp = push_block (size, extra, where_pad == downward);
3751 else if (CONST_INT_P (args_so_far))
3752 temp = memory_address (BLKmode,
3753 plus_constant (args_addr,
3754 skip + INTVAL (args_so_far)));
3756 temp = memory_address (BLKmode,
3757 plus_constant (gen_rtx_PLUS (Pmode,
3762 if (!ACCUMULATE_OUTGOING_ARGS)
3764 /* If the source is referenced relative to the stack pointer,
3765 copy it to another register to stabilize it. We do not need
3766 to do this if we know that we won't be changing sp. */
3768 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3769 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3770 temp = copy_to_reg (temp);
3773 target = gen_rtx_MEM (BLKmode, temp);
3775 /* We do *not* set_mem_attributes here, because incoming arguments
3776 may overlap with sibling call outgoing arguments and we cannot
3777 allow reordering of reads from function arguments with stores
3778 to outgoing arguments of sibling calls. We do, however, want
3779 to record the alignment of the stack slot. */
3780 /* ALIGN may well be better aligned than TYPE, e.g. due to
3781 PARM_BOUNDARY. Assume the caller isn't lying. */
3782 set_mem_align (target, align);
3784 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3787 else if (partial > 0)
3789 /* Scalar partly in registers. */
3791 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3794 /* # bytes of start of argument
3795 that we must make space for but need not store. */
3796 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3797 int args_offset = INTVAL (args_so_far);
3800 /* Push padding now if padding above and stack grows down,
3801 or if padding below and stack grows up.
3802 But if space already allocated, this has already been done. */
3803 if (extra && args_addr == 0
3804 && where_pad != none && where_pad != stack_direction)
3805 anti_adjust_stack (GEN_INT (extra));
3807 /* If we make space by pushing it, we might as well push
3808 the real data. Otherwise, we can leave OFFSET nonzero
3809 and leave the space uninitialized. */
3813 /* Now NOT_STACK gets the number of words that we don't need to
3814 allocate on the stack. Convert OFFSET to words too. */
3815 not_stack = (partial - offset) / UNITS_PER_WORD;
3816 offset /= UNITS_PER_WORD;
3818 /* If the partial register-part of the arg counts in its stack size,
3819 skip the part of stack space corresponding to the registers.
3820 Otherwise, start copying to the beginning of the stack space,
3821 by setting SKIP to 0. */
3822 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3824 if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
3825 x = validize_mem (force_const_mem (mode, x));
3827 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3828 SUBREGs of such registers are not allowed. */
3829 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3830 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3831 x = copy_to_reg (x);
3833 /* Loop over all the words allocated on the stack for this arg. */
3834 /* We can do it by words, because any scalar bigger than a word
3835 has a size that is a multiple of a word. */
3836 #ifndef PUSH_ARGS_REVERSED
3837 for (i = not_stack; i < size; i++)
3839 for (i = size - 1; i >= not_stack; i--)
3841 if (i >= not_stack + offset)
3842 emit_push_insn (operand_subword_force (x, i, mode),
3843 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3845 GEN_INT (args_offset + ((i - not_stack + skip)
3847 reg_parm_stack_space, alignment_pad);
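/* Each iteration above pushes one word of the scalar via a recursive
   word_mode call; words with I below NOT_STACK + OFFSET are skipped
   because registers (or deliberately uninitialized space) cover them. */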
3854 /* Push padding now if padding above and stack grows down,
3855 or if padding below and stack grows up.
3856 But if space already allocated, this has already been done. */
3857 if (extra && args_addr == 0
3858 && where_pad != none && where_pad != stack_direction)
3859 anti_adjust_stack (GEN_INT (extra));
3861 #ifdef PUSH_ROUNDING
3862 if (args_addr == 0 && PUSH_ARGS)
3863 emit_single_push_insn (mode, x, type);
3867 if (CONST_INT_P (args_so_far))
3869 = memory_address (mode,
3870 plus_constant (args_addr,
3871 INTVAL (args_so_far)));
3873 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3875 dest = gen_rtx_MEM (mode, addr);
3877 /* We do *not* set_mem_attributes here, because incoming arguments
3878 may overlap with sibling call outgoing arguments and we cannot
3879 allow reordering of reads from function arguments with stores
3880 to outgoing arguments of sibling calls. We do, however, want
3881 to record the alignment of the stack slot. */
3882 /* ALIGN may well be better aligned than TYPE, e.g. due to
3883 PARM_BOUNDARY. Assume the caller isn't lying. */
3884 set_mem_align (dest, align);
3886 emit_move_insn (dest, x);
3890 /* If part should go in registers, copy that part
3891 into the appropriate registers. Do this now, at the end,
3892 since mem-to-mem copies above may do function calls. */
3893 if (partial > 0 && reg != 0)
3895 /* Handle calls that pass values in multiple non-contiguous locations.
3896 The Irix 6 ABI has examples of this. */
3897 if (GET_CODE (reg) == PARALLEL)
3898 emit_group_load (reg, x, type, -1);
3901 gcc_assert (partial % UNITS_PER_WORD == 0);
3902 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3906 if (extra && args_addr == 0 && where_pad == stack_direction)
3907 anti_adjust_stack (GEN_INT (extra));
3909 if (alignment_pad && args_addr == 0)
3910 anti_adjust_stack (alignment_pad);
3913 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3917 get_subtarget (rtx x)
3921 /* Only registers can be subtargets. */
3923 /* Don't use hard regs to avoid extending their life. */
3924 || REGNO (x) < FIRST_PSEUDO_REGISTER
3928 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3929 FIELD is a bitfield. Returns true if the optimization was successful,
3930 and there's nothing else to do. */
3933 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3934 unsigned HOST_WIDE_INT bitpos,
3935 enum machine_mode mode1, rtx str_rtx,
3938 enum machine_mode str_mode = GET_MODE (str_rtx);
3939 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3944 enum tree_code code;
3946 if (mode1 != VOIDmode
3947 || bitsize >= BITS_PER_WORD
3948 || str_bitsize > BITS_PER_WORD
3949 || TREE_SIDE_EFFECTS (to)
3950 || TREE_THIS_VOLATILE (to))
3954 if (TREE_CODE (src) != SSA_NAME)
3956 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3959 srcstmt = get_gimple_for_ssa_name (src);
3961 || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
3964 code = gimple_assign_rhs_code (srcstmt);
3966 op0 = gimple_assign_rhs1 (srcstmt);
3968 /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
3969 to find its initialization. Hopefully the initialization will
3970 be from a bitfield load. */
3971 if (TREE_CODE (op0) == SSA_NAME)
3973 gimple op0stmt = get_gimple_for_ssa_name (op0);
3975 /* We want to eventually have OP0 be the same as TO, which
3976 should be a bitfield. */
3978 || !is_gimple_assign (op0stmt)
3979 || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
3981 op0 = gimple_assign_rhs1 (op0stmt);
3984 op1 = gimple_assign_rhs2 (srcstmt);
3986 if (!operand_equal_p (to, op0, 0))
3989 if (MEM_P (str_rtx))
3991 unsigned HOST_WIDE_INT offset1;
3993 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3994 str_mode = word_mode;
3995 str_mode = get_best_mode (bitsize, bitpos,
3996 MEM_ALIGN (str_rtx), str_mode, 0);
3997 if (str_mode == VOIDmode)
3999 str_bitsize = GET_MODE_BITSIZE (str_mode);
4002 bitpos %= str_bitsize;
4003 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4004 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4006 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4009 /* If the bit field covers the whole REG/MEM, store_field
4010 will likely generate better code. */
4011 if (bitsize >= str_bitsize)
4014 /* We can't handle fields split across multiple entities. */
4015 if (bitpos + bitsize > str_bitsize)
4018 if (BYTES_BIG_ENDIAN)
4019 bitpos = str_bitsize - bitpos - bitsize;
4025 /* For now, just optimize the case of the topmost bitfield
4026 where we don't need to do any masking and also
4027 1 bit bitfields where xor can be used.
4028 We might win by one instruction for the other bitfields
4029 too if insv/extv instructions aren't used, so that
4030 can be added later. */
4031 if (bitpos + bitsize != str_bitsize
4032 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4035 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4036 value = convert_modes (str_mode,
4037 TYPE_MODE (TREE_TYPE (op1)), value,
4038 TYPE_UNSIGNED (TREE_TYPE (op1)));
4040 /* We may be accessing data outside the field, which means
4041 we can alias adjacent data. */
4042 if (MEM_P (str_rtx))
4044 str_rtx = shallow_copy_rtx (str_rtx);
4045 set_mem_alias_set (str_rtx, 0);
4046 set_mem_expr (str_rtx, 0);
4049 binop = code == PLUS_EXPR ? add_optab : sub_optab;
4050 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4052 value = expand_and (str_mode, value, const1_rtx, NULL);
4055 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4056 bitpos, NULL_RTX, 1);
4057 result = expand_binop (str_mode, binop, str_rtx,
4058 value, str_rtx, 1, OPTAB_WIDEN);
4059 if (result != str_rtx)
4060 emit_move_insn (str_rtx, result);
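/* E.g. (illustrative): incrementing a 1-bit bitfield simply flips it,
   so the code above can use an xor of the single shifted bit instead of
   a full read-modify-write sequence. */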
4065 if (TREE_CODE (op1) != INTEGER_CST)
4067 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4068 value = convert_modes (GET_MODE (str_rtx),
4069 TYPE_MODE (TREE_TYPE (op1)), value,
4070 TYPE_UNSIGNED (TREE_TYPE (op1)));
4072 /* We may be accessing data outside the field, which means
4073 we can alias adjacent data. */
4074 if (MEM_P (str_rtx))
4076 str_rtx = shallow_copy_rtx (str_rtx);
4077 set_mem_alias_set (str_rtx, 0);
4078 set_mem_expr (str_rtx, 0);
4081 binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4082 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4084 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4086 value = expand_and (GET_MODE (str_rtx), value, mask,
4089 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4090 bitpos, NULL_RTX, 1);
4091 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4092 value, str_rtx, 1, OPTAB_WIDEN);
4093 if (result != str_rtx)
4094 emit_move_insn (str_rtx, result);
4105 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4106 is true, try generating a nontemporal store. */
4109 expand_assignment (tree to, tree from, bool nontemporal)
4113 enum machine_mode mode;
4115 enum insn_code icode;
4117 /* Don't crash if the lhs of the assignment was erroneous. */
4118 if (TREE_CODE (to) == ERROR_MARK)
4120 expand_normal (from);
4124 /* Optimize away no-op moves without side-effects. */
4125 if (operand_equal_p (to, from, 0))
4128 mode = TYPE_MODE (TREE_TYPE (to));
4129 if ((TREE_CODE (to) == MEM_REF
4130 || TREE_CODE (to) == TARGET_MEM_REF)
4132 && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
4133 get_object_alignment (to, BIGGEST_ALIGNMENT)))
4134 < (signed) GET_MODE_ALIGNMENT (mode))
4135 && ((icode = optab_handler (movmisalign_optab, mode))
4136 != CODE_FOR_nothing))
4138 struct expand_operand ops[2];
4139 enum machine_mode address_mode;
4142 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4143 reg = force_not_mem (reg);
4145 if (TREE_CODE (to) == MEM_REF)
4148 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
4149 tree base = TREE_OPERAND (to, 0);
4150 address_mode = targetm.addr_space.address_mode (as);
4151 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4152 op0 = convert_memory_address_addr_space (address_mode, op0, as);
4153 if (!integer_zerop (TREE_OPERAND (to, 1)))
4156 = immed_double_int_const (mem_ref_offset (to), address_mode);
4157 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4159 op0 = memory_address_addr_space (mode, op0, as);
4160 mem = gen_rtx_MEM (mode, op0);
4161 set_mem_attributes (mem, to, 0);
4162 set_mem_addr_space (mem, as);
4164 else if (TREE_CODE (to) == TARGET_MEM_REF)
4166 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
4167 struct mem_address addr;
4169 get_address_description (to, &addr);
4170 op0 = addr_for_mem_ref (&addr, as, true);
4171 op0 = memory_address_addr_space (mode, op0, as);
4172 mem = gen_rtx_MEM (mode, op0);
4173 set_mem_attributes (mem, to, 0);
4174 set_mem_addr_space (mem, as);
4178 if (TREE_THIS_VOLATILE (to))
4179 MEM_VOLATILE_P (mem) = 1;
4181 create_fixed_operand (&ops[0], mem);
4182 create_input_operand (&ops[1], reg, mode);
4183 /* The movmisalign<mode> pattern cannot fail, else the assignment would
4184 silently be omitted. */
4185 expand_insn (icode, 2, ops);
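/* A sketch of the situation this path handles (assuming a target that
   provides a movmisalign<mode> pattern):

     struct __attribute__ ((packed)) P { char c; int i; } *p;
     void f (int v) { p->i = v; }

   The store to p->i is an int-mode memory reference whose known
   alignment is below GET_MODE_ALIGNMENT (SImode), so the value is
   forced out of memory and written through the misaligned-move pattern
   rather than an ordinary move.  */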
4189 /* Assignment of a structure component needs special treatment
4190 if the structure component's rtx is not simply a MEM.
4191 Assignment of an array element at a variable index, and assignment of
4192 an array element in an unaligned packed structure field, have the same
4193 problem. */
4194 if (handled_component_p (to)
4195 /* ??? We only need to handle MEM_REF here if the access is not
4196 a full access of the base object. */
4197 || (TREE_CODE (to) == MEM_REF
4198 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4199 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4201 enum machine_mode mode1;
4202 HOST_WIDE_INT bitsize, bitpos;
4209 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4210 &unsignedp, &volatilep, true);
4212 /* If we are going to use store_bit_field and extract_bit_field,
4213 make sure to_rtx will be safe for multiple use. */
4215 to_rtx = expand_normal (tem);
4217 /* If the bitfield is volatile, we want to access it in the
4218 field's mode, not the computed mode.
4219 If a MEM has VOIDmode (external with incomplete type),
4220 use BLKmode for it instead. */
4223 if (volatilep && flag_strict_volatile_bitfields > 0)
4224 to_rtx = adjust_address (to_rtx, mode1, 0);
4225 else if (GET_MODE (to_rtx) == VOIDmode)
4226 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4231 enum machine_mode address_mode;
4234 if (!MEM_P (to_rtx))
4236 /* We can get constant negative offsets into arrays with broken
4237 user code. Translate this to a trap instead of ICEing. */
4238 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4239 expand_builtin_trap ();
4240 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4243 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4245 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4246 if (GET_MODE (offset_rtx) != address_mode)
4247 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4249 /* A constant address in TO_RTX can have VOIDmode; we must not try
4250 to call force_reg in that case, so avoid it. */
4252 && GET_MODE (to_rtx) == BLKmode
4253 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4255 && (bitpos % bitsize) == 0
4256 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4257 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4259 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4263 to_rtx = offset_address (to_rtx, offset_rtx,
4264 highest_pow2_factor_for_target (to,
4268 /* No action is needed if the target is not a memory and the field
4269 lies completely outside that target. This can occur if the source
4270 code contains an out-of-bounds access to a small array. */
4272 && GET_MODE (to_rtx) != BLKmode
4273 && (unsigned HOST_WIDE_INT) bitpos
4274 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4276 expand_normal (from);
4279 /* Handle expand_expr of a complex value returning a CONCAT. */
4280 else if (GET_CODE (to_rtx) == CONCAT)
4282 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4283 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4285 && bitsize == mode_bitsize)
4286 result = store_expr (from, to_rtx, false, nontemporal);
4287 else if (bitsize == mode_bitsize / 2
4288 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4289 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4291 else if (bitpos + bitsize <= mode_bitsize / 2)
4292 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4293 mode1, from, TREE_TYPE (tem),
4294 get_alias_set (to), nontemporal);
4295 else if (bitpos >= mode_bitsize / 2)
4296 result = store_field (XEXP (to_rtx, 1), bitsize,
4297 bitpos - mode_bitsize / 2, mode1, from,
4298 TREE_TYPE (tem), get_alias_set (to),
4300 else if (bitpos == 0 && bitsize == mode_bitsize)
4303 result = expand_normal (from);
4304 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4305 TYPE_MODE (TREE_TYPE (from)), 0);
4306 emit_move_insn (XEXP (to_rtx, 0),
4307 read_complex_part (from_rtx, false));
4308 emit_move_insn (XEXP (to_rtx, 1),
4309 read_complex_part (from_rtx, true));
4313 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4314 GET_MODE_SIZE (GET_MODE (to_rtx)),
4316 write_complex_part (temp, XEXP (to_rtx, 0), false);
4317 write_complex_part (temp, XEXP (to_rtx, 1), true);
4318 result = store_field (temp, bitsize, bitpos, mode1, from,
4319 TREE_TYPE (tem), get_alias_set (to),
4321 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4322 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
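/* Illustrative only: the CONCAT cases above correspond to stores into
   complex lvalues, e.g.

     _Complex double z;
     __real__ z = 1.0;  (bitpos 0, bitsize mode_bitsize / 2)
     __imag__ z = 2.0;  (bitpos mode_bitsize / 2)

   Stores that straddle the two halves fall through to the
   stack-temporary code just above, so that store_field can operate on
   a MEM.  */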
4329 /* If the field is at offset zero, we could have been given the
4330 DECL_RTX of the parent struct. Don't munge it. */
4331 to_rtx = shallow_copy_rtx (to_rtx);
4333 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4335 /* Deal with volatile and readonly fields. The former is only
4336 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4338 MEM_VOLATILE_P (to_rtx) = 1;
4339 if (component_uses_parent_alias_set (to))
4340 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4343 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4347 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4348 TREE_TYPE (tem), get_alias_set (to),
4353 preserve_temp_slots (result);
4359 /* If the rhs is a function call and its value is not an aggregate,
4360 call the function before we start to compute the lhs.
4361 This is needed for correct code for cases such as
4362 val = setjmp (buf) on machines where reference to val
4363 requires loading up part of an address in a separate insn.
4365 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
4366 since it might be a promoted variable where the zero- or sign-extension
4367 needs to be done. Handling this in the normal way is safe because no
4368 computation is done before the call. The same is true for SSA names. */
4369 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4370 && COMPLETE_TYPE_P (TREE_TYPE (from))
4371 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4372 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4373 && REG_P (DECL_RTL (to)))
4374 || TREE_CODE (to) == SSA_NAME))
4379 value = expand_normal (from);
4381 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4383 /* Handle calls that return values in multiple non-contiguous locations.
4384 The Irix 6 ABI has examples of this. */
4385 if (GET_CODE (to_rtx) == PARALLEL)
4386 emit_group_load (to_rtx, value, TREE_TYPE (from),
4387 int_size_in_bytes (TREE_TYPE (from)));
4388 else if (GET_MODE (to_rtx) == BLKmode)
4389 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4392 if (POINTER_TYPE_P (TREE_TYPE (to)))
4393 value = convert_memory_address_addr_space
4394 (GET_MODE (to_rtx), value,
4395 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4397 emit_move_insn (to_rtx, value);
4399 preserve_temp_slots (to_rtx);
4405 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4406 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4409 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4411 /* Don't move directly into a return register. */
4412 if (TREE_CODE (to) == RESULT_DECL
4413 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4418 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4420 if (GET_CODE (to_rtx) == PARALLEL)
4421 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4422 int_size_in_bytes (TREE_TYPE (from)));
4424 emit_move_insn (to_rtx, temp);
4426 preserve_temp_slots (to_rtx);
4432 /* In case we are returning the contents of an object which overlaps
4433 the place the value is being stored, use a safe function when copying
4434 a value through a pointer into a structure value return block. */
4435 if (TREE_CODE (to) == RESULT_DECL
4436 && TREE_CODE (from) == INDIRECT_REF
4437 && ADDR_SPACE_GENERIC_P
4438 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4439 && refs_may_alias_p (to, from)
4440 && cfun->returns_struct
4441 && !cfun->returns_pcc_struct)
4446 size = expr_size (from);
4447 from_rtx = expand_normal (from);
4449 emit_library_call (memmove_libfunc, LCT_NORMAL,
4450 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4451 XEXP (from_rtx, 0), Pmode,
4452 convert_to_mode (TYPE_MODE (sizetype),
4453 size, TYPE_UNSIGNED (sizetype)),
4454 TYPE_MODE (sizetype));
4456 preserve_temp_slots (to_rtx);
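/* A hypothetical source fragment that reaches the memmove call above:

     struct big { int a[32]; };
     struct big f (struct big *p) { return *p; }

   When *p may overlap the caller-supplied return block, a plain block
   move could clobber the source mid-copy, so the library memmove is
   used instead.  */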
4462 /* Compute FROM and store the value in the rtx we got. */
4465 result = store_expr (from, to_rtx, 0, nontemporal);
4466 preserve_temp_slots (result);
4472 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4473 succeeded, false otherwise. */
4476 emit_storent_insn (rtx to, rtx from)
4478 struct expand_operand ops[2];
4479 enum machine_mode mode = GET_MODE (to);
4480 enum insn_code code = optab_handler (storent_optab, mode);
4482 if (code == CODE_FOR_nothing)
4485 create_fixed_operand (&ops[0], to);
4486 create_input_operand (&ops[1], from, mode);
4487 return maybe_expand_insn (code, 2, ops);
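/* A minimal usage sketch (TO_RTX and FROM_RTX stand for any
   already-expanded rtxes of the same mode):

     if (!emit_storent_insn (to_rtx, from_rtx))
       emit_move_insn (to_rtx, from_rtx);

   Callers treat a false return as "no nontemporal store available on
   this target" and fall back to an ordinary move, exactly as
   store_expr does below.  */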
4490 /* Generate code for computing expression EXP,
4491 and storing the value into TARGET.
4493 If the mode is BLKmode then we may return TARGET itself.
4494 It turns out that in BLKmode it doesn't cause a problem,
4495 because C has no operators that could combine two different
4496 assignments into the same BLKmode object with different values
4497 with no sequence point. Will other languages need this to
4498 be more thorough?
4500 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4501 stack, and block moves may need to be treated specially.
4503 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4506 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4509 rtx alt_rtl = NULL_RTX;
4510 location_t loc = EXPR_LOCATION (exp);
4512 if (VOID_TYPE_P (TREE_TYPE (exp)))
4514 /* C++ can generate ?: expressions with a throw expression in one
4515 branch and an rvalue in the other. Here, we resolve attempts to
4516 store the throw expression's nonexistent result. */
4517 gcc_assert (!call_param_p);
4518 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4521 if (TREE_CODE (exp) == COMPOUND_EXPR)
4523 /* Perform first part of compound expression, then assign from second
4524 part. */
4525 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4526 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4527 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4530 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4532 /* For conditional expression, get safe form of the target. Then
4533 test the condition, doing the appropriate assignment on either
4534 side. This avoids the creation of unnecessary temporaries.
4535 For non-BLKmode, it is more efficient not to do this. */
4537 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4539 do_pending_stack_adjust ();
4541 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4542 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4544 emit_jump_insn (gen_jump (lab2));
4547 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4554 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4555 /* If this is a scalar in a register that is stored in a wider mode
4556 than the declared mode, compute the result into its declared mode
4557 and then convert to the wider mode. Our value is the computed
4558 expression. */
4560 rtx inner_target = 0;
4562 /* We can do the conversion inside EXP, which will often result
4563 in some optimizations. Do the conversion in two steps: first
4564 change the signedness, if needed, then the extend. But don't
4565 do this if the type of EXP is a subtype of something else
4566 since then the conversion might involve more than just
4567 converting modes. */
4568 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4569 && TREE_TYPE (TREE_TYPE (exp)) == 0
4570 && GET_MODE_PRECISION (GET_MODE (target))
4571 == TYPE_PRECISION (TREE_TYPE (exp)))
4573 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4574 != SUBREG_PROMOTED_UNSIGNED_P (target))
4576 /* Some types, e.g. Fortran's logical*4, won't have a signed
4577 version, so use the mode instead. */
4579 = (signed_or_unsigned_type_for
4580 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4582 ntype = lang_hooks.types.type_for_mode
4583 (TYPE_MODE (TREE_TYPE (exp)),
4584 SUBREG_PROMOTED_UNSIGNED_P (target));
4586 exp = fold_convert_loc (loc, ntype, exp);
4589 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4590 (GET_MODE (SUBREG_REG (target)),
4591 SUBREG_PROMOTED_UNSIGNED_P (target)),
4594 inner_target = SUBREG_REG (target);
4597 temp = expand_expr (exp, inner_target, VOIDmode,
4598 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4600 /* If TEMP is a VOIDmode constant, use convert_modes to make
4601 sure that we properly convert it. */
4602 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4604 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4605 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4606 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4607 GET_MODE (target), temp,
4608 SUBREG_PROMOTED_UNSIGNED_P (target));
4611 convert_move (SUBREG_REG (target), temp,
4612 SUBREG_PROMOTED_UNSIGNED_P (target));
4616 else if ((TREE_CODE (exp) == STRING_CST
4617 || (TREE_CODE (exp) == MEM_REF
4618 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4619 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4621 && integer_zerop (TREE_OPERAND (exp, 1))))
4622 && !nontemporal && !call_param_p
4625 /* Optimize initialization of an array with a STRING_CST. */
4626 HOST_WIDE_INT exp_len, str_copy_len;
4628 tree str = TREE_CODE (exp) == STRING_CST
4629 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4631 exp_len = int_expr_size (exp);
4635 if (TREE_STRING_LENGTH (str) <= 0)
4638 str_copy_len = strlen (TREE_STRING_POINTER (str));
4639 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4642 str_copy_len = TREE_STRING_LENGTH (str);
4643 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
4644 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
4646 str_copy_len += STORE_MAX_PIECES - 1;
4647 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4649 str_copy_len = MIN (str_copy_len, exp_len);
4650 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4651 CONST_CAST (char *, TREE_STRING_POINTER (str)),
4652 MEM_ALIGN (target), false))
4657 dest_mem = store_by_pieces (dest_mem,
4658 str_copy_len, builtin_strncpy_read_str,
4660 TREE_STRING_POINTER (str)),
4661 MEM_ALIGN (target), false,
4662 exp_len > str_copy_len ? 1 : 0);
4663 if (exp_len > str_copy_len)
4664 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4665 GEN_INT (exp_len - str_copy_len),
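/* Worked example (illustrative): for

     char buf[16] = "abc";

   STR_COPY_LEN covers the bytes "abc\0" (possibly rounded up to a
   STORE_MAX_PIECES boundary), store_by_pieces emits those stores
   inline, and clear_storage zeroes the remaining tail of the 16-byte
   array.  */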
4674 /* If we want to use a nontemporal store, force the value to
4675 a register first. */
4676 tmp_target = nontemporal ? NULL_RTX : target;
4677 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4679 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4683 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4684 the same as that of TARGET, adjust the constant. This is needed, for
4685 example, in case it is a CONST_DOUBLE and we want only a word-sized
4686 value. */
4687 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4688 && TREE_CODE (exp) != ERROR_MARK
4689 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4690 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4691 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4693 /* If value was not generated in the target, store it there.
4694 Convert the value to TARGET's type first if necessary and emit the
4695 pending incrementations that have been queued when expanding EXP.
4696 Note that we cannot emit the whole queue blindly because this will
4697 effectively disable the POST_INC optimization later.
4699 If TEMP and TARGET compare equal according to rtx_equal_p, but
4700 one or both of them are volatile memory refs, we have to distinguish
4701 two cases:
4702 - expand_expr has used TARGET. In this case, we must not generate
4703 another copy. This can be detected by TARGET being equal according
4704 to == .
4705 - expand_expr has not used TARGET - that means that the source just
4706 happens to have the same RTX form. Since temp will have been created
4707 by expand_expr, it will compare unequal according to == .
4708 We must generate a copy in this case, to reach the correct number
4709 of volatile memory references. */
4711 if ((! rtx_equal_p (temp, target)
4712 || (temp != target && (side_effects_p (temp)
4713 || side_effects_p (target))))
4714 && TREE_CODE (exp) != ERROR_MARK
4715 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4716 but TARGET is not a valid memory reference, TEMP will differ
4717 from TARGET although it is really the same location. */
4719 && rtx_equal_p (alt_rtl, target)
4720 && !side_effects_p (alt_rtl)
4721 && !side_effects_p (target))
4722 /* If there's nothing to copy, don't bother. Don't call
4723 expr_size unless necessary, because some front ends' (e.g. C++'s)
4724 expr_size hook must not be given objects that are not
4725 supposed to be bit-copied or bit-initialized. */
4726 && expr_size (exp) != const0_rtx)
4728 if (GET_MODE (temp) != GET_MODE (target)
4729 && GET_MODE (temp) != VOIDmode)
4731 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4732 if (GET_MODE (target) == BLKmode
4733 && GET_MODE (temp) == BLKmode)
4734 emit_block_move (target, temp, expr_size (exp),
4736 ? BLOCK_OP_CALL_PARM
4737 : BLOCK_OP_NORMAL));
4738 else if (GET_MODE (target) == BLKmode)
4739 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
4740 0, GET_MODE (temp), temp);
4742 convert_move (target, temp, unsignedp);
4745 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4747 /* Handle copying a string constant into an array. The string
4748 constant may be shorter than the array. So copy just the string's
4749 actual length, and clear the rest. First get the size of the data
4750 type of the string, which is actually the size of the target. */
4751 rtx size = expr_size (exp);
4753 if (CONST_INT_P (size)
4754 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4755 emit_block_move (target, temp, size,
4757 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4760 enum machine_mode pointer_mode
4761 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4762 enum machine_mode address_mode
4763 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4765 /* Compute the size of the data to copy from the string. */
4767 = size_binop_loc (loc, MIN_EXPR,
4768 make_tree (sizetype, size),
4769 size_int (TREE_STRING_LENGTH (exp)));
4771 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4773 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4776 /* Copy that much. */
4777 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4778 TYPE_UNSIGNED (sizetype));
4779 emit_block_move (target, temp, copy_size_rtx,
4781 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4783 /* Figure out how much is left in TARGET that we have to clear.
4784 Do all calculations in pointer_mode. */
4785 if (CONST_INT_P (copy_size_rtx))
4787 size = plus_constant (size, -INTVAL (copy_size_rtx));
4788 target = adjust_address (target, BLKmode,
4789 INTVAL (copy_size_rtx));
4793 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4794 copy_size_rtx, NULL_RTX, 0,
4797 if (GET_MODE (copy_size_rtx) != address_mode)
4798 copy_size_rtx = convert_to_mode (address_mode,
4800 TYPE_UNSIGNED (sizetype));
4802 target = offset_address (target, copy_size_rtx,
4803 highest_pow2_factor (copy_size));
4804 label = gen_label_rtx ();
4805 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4806 GET_MODE (size), 0, label);
4809 if (size != const0_rtx)
4810 clear_storage (target, size, BLOCK_OP_NORMAL);
4816 /* Handle calls that return values in multiple non-contiguous locations.
4817 The Irix 6 ABI has examples of this. */
4818 else if (GET_CODE (target) == PARALLEL)
4819 emit_group_load (target, temp, TREE_TYPE (exp),
4820 int_size_in_bytes (TREE_TYPE (exp)));
4821 else if (GET_MODE (temp) == BLKmode)
4822 emit_block_move (target, temp, expr_size (exp),
4824 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4825 else if (nontemporal
4826 && emit_storent_insn (target, temp))
4827 /* If we managed to emit a nontemporal store, there is nothing else to
4828 do. */
4832 temp = force_operand (temp, target);
4834 emit_move_insn (target, temp);
4841 /* Helper for categorize_ctor_elements. Identical interface. */
4844 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4845 HOST_WIDE_INT *p_elt_count,
4848 unsigned HOST_WIDE_INT idx;
4849 HOST_WIDE_INT nz_elts, elt_count;
4850 tree value, purpose;
4852 /* Whether CTOR is a valid constant initializer, in accordance with what
4853 initializer_constant_valid_p does. If inferred from the constructor
4854 elements, true until proven otherwise. */
4855 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4856 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4861 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4863 HOST_WIDE_INT mult = 1;
4865 if (TREE_CODE (purpose) == RANGE_EXPR)
4867 tree lo_index = TREE_OPERAND (purpose, 0);
4868 tree hi_index = TREE_OPERAND (purpose, 1);
4870 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4871 mult = (tree_low_cst (hi_index, 1)
4872 - tree_low_cst (lo_index, 1) + 1);
4875 switch (TREE_CODE (value))
4879 HOST_WIDE_INT nz = 0, ic = 0;
4882 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4884 nz_elts += mult * nz;
4885 elt_count += mult * ic;
4887 if (const_from_elts_p && const_p)
4888 const_p = const_elt_p;
4895 if (!initializer_zerop (value))
4901 nz_elts += mult * TREE_STRING_LENGTH (value);
4902 elt_count += mult * TREE_STRING_LENGTH (value);
4906 if (!initializer_zerop (TREE_REALPART (value)))
4908 if (!initializer_zerop (TREE_IMAGPART (value)))
4916 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4918 if (!initializer_zerop (TREE_VALUE (v)))
4927 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4930 nz_elts += mult * tc;
4931 elt_count += mult * tc;
4933 if (const_from_elts_p && const_p)
4934 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4942 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4943 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4946 bool clear_this = true;
4948 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4950 /* We don't expect more than one element of the union to be
4951 initialized. Not sure what we should do otherwise... */
4952 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4955 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4956 CONSTRUCTOR_ELTS (ctor),
4959 /* ??? We could look at each element of the union, and find the
4960 largest element. Which would avoid comparing the size of the
4961 initialized element against any tail padding in the union.
4962 Doesn't seem worth the effort... */
4963 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4964 TYPE_SIZE (init_sub_type)) == 1)
4966 /* And now we have to find out if the element itself is fully
4967 constructed. E.g. for union { struct { int a, b; } s; } u
4968 = { .s = { .a = 1 } }. */
4969 if (elt_count == count_type_elements (init_sub_type, false))
4974 *p_must_clear = clear_this;
4977 *p_nz_elts += nz_elts;
4978 *p_elt_count += elt_count;
4983 /* Examine CTOR to discover:
4984 * how many scalar fields are set to nonzero values,
4985 and place it in *P_NZ_ELTS;
4986 * how many scalar fields in total are in CTOR,
4987 and place it in *P_ELT_COUNT.
4988 * if a type is a union, and the initializer from the constructor
4989 is not the largest element in the union, then set *p_must_clear.
4991 Return whether or not CTOR is a valid static constant initializer, the same
4992 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4995 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4996 HOST_WIDE_INT *p_elt_count,
5001 *p_must_clear = false;
5004 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
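/* Worked example (illustrative): for

     struct { int a, b; } x[2] = { { 1, 0 }, { 0, 0 } };

   this returns *P_NZ_ELTS == 1 (only x[0].a is nonzero) and
   *P_ELT_COUNT == 4 (four scalars appear in the constructor), counts
   that mostly_zeros_p and all_zeros_p below use to decide whether to
   clear the whole object first.  */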
5007 /* Count the number of scalars in TYPE. Return -1 on overflow or
5008 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
5009 flexible array member at the end of the structure. */
5012 count_type_elements (const_tree type, bool allow_flexarr)
5014 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5015 switch (TREE_CODE (type))
5019 tree telts = array_type_nelts (type);
5020 if (telts && host_integerp (telts, 1))
5022 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5023 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5026 else if (max / n > m)
5034 HOST_WIDE_INT n = 0, t;
5037 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5038 if (TREE_CODE (f) == FIELD_DECL)
5040 t = count_type_elements (TREE_TYPE (f), false);
5043 /* Check for structures with flexible array member. */
5044 tree tf = TREE_TYPE (f);
5046 && DECL_CHAIN (f) == NULL
5047 && TREE_CODE (tf) == ARRAY_TYPE
5049 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5050 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5051 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5052 && int_size_in_bytes (type) >= 0)
5064 case QUAL_UNION_TYPE:
5071 return TYPE_VECTOR_SUBPARTS (type);
5075 case FIXED_POINT_TYPE:
5080 case REFERENCE_TYPE:
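/* Worked example (illustrative): for

     struct S { int a; short b[3]; };

   count_type_elements returns 1 + 3 == 4, while a variable-sized
   array makes the result -1 (unknown).  */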
5095 /* Return 1 if EXP contains mostly (3/4) zeros. */
5098 mostly_zeros_p (const_tree exp)
5100 if (TREE_CODE (exp) == CONSTRUCTOR)
5103 HOST_WIDE_INT nz_elts, count, elts;
5106 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5110 elts = count_type_elements (TREE_TYPE (exp), false);
5112 return nz_elts < elts / 4;
5115 return initializer_zerop (exp);
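/* Illustrative: "int a[8] = { 1 };" has one nonzero scalar out of
   eight, so mostly_zeros_p returns 1 and store_constructor will clear
   the whole array first and then store only a[0].  */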
5118 /* Return 1 if EXP contains all zeros. */
5121 all_zeros_p (const_tree exp)
5123 if (TREE_CODE (exp) == CONSTRUCTOR)
5126 HOST_WIDE_INT nz_elts, count;
5129 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5130 return nz_elts == 0;
5133 return initializer_zerop (exp);
5136 /* Helper function for store_constructor.
5137 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5138 TYPE is the type of the CONSTRUCTOR, not the element type.
5139 CLEARED is as for store_constructor.
5140 ALIAS_SET is the alias set to use for any stores.
5142 This provides a recursive shortcut back to store_constructor when it isn't
5143 necessary to go through store_field. This is so that we can pass through
5144 the cleared field to let store_constructor know that we may not have to
5145 clear a substructure if the outer structure has already been cleared. */
5148 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5149 HOST_WIDE_INT bitpos, enum machine_mode mode,
5150 tree exp, tree type, int cleared,
5151 alias_set_type alias_set)
5153 if (TREE_CODE (exp) == CONSTRUCTOR
5154 /* We can only call store_constructor recursively if the size and
5155 bit position are on a byte boundary. */
5156 && bitpos % BITS_PER_UNIT == 0
5157 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5158 /* If we have a nonzero bitpos for a register target, then we just
5159 let store_field do the bitfield handling. This is unlikely to
5160 generate unnecessary clear instructions anyways. */
5161 && (bitpos == 0 || MEM_P (target)))
5165 = adjust_address (target,
5166 GET_MODE (target) == BLKmode
5168 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5169 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5172 /* Update the alias set, if required. */
5173 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5174 && MEM_ALIAS_SET (target) != 0)
5176 target = copy_rtx (target);
5177 set_mem_alias_set (target, alias_set);
5180 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5183 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
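/* Illustrative: for a nested initializer such as

     struct in { int x, y; };
     struct out { struct in i; } o = { { 1, 2 } };

   the inner CONSTRUCTOR lies on a byte boundary within O, so the
   shortcut above recurses straight into store_constructor, letting
   CLEARED propagate instead of re-clearing the substructure through
   store_field.  */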
5186 /* Store the value of constructor EXP into the rtx TARGET.
5187 TARGET is either a REG or a MEM; we know it cannot conflict, since
5188 safe_from_p has been called.
5189 CLEARED is true if TARGET is known to have been zero'd.
5190 SIZE is the number of bytes of TARGET we are allowed to modify: this
5191 may not be the same as the size of EXP if we are assigning to a field
5192 which has been packed to exclude padding bits. */
5195 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5197 tree type = TREE_TYPE (exp);
5198 #ifdef WORD_REGISTER_OPERATIONS
5199 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5202 switch (TREE_CODE (type))
5206 case QUAL_UNION_TYPE:
5208 unsigned HOST_WIDE_INT idx;
5211 /* If size is zero or the target is already cleared, do nothing. */
5212 if (size == 0 || cleared)
5214 /* We either clear the aggregate or indicate the value is dead. */
5215 else if ((TREE_CODE (type) == UNION_TYPE
5216 || TREE_CODE (type) == QUAL_UNION_TYPE)
5217 && ! CONSTRUCTOR_ELTS (exp))
5218 /* If the constructor is empty, clear the union. */
5220 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5224 /* If we are building a static constructor into a register,
5225 set the initial value as zero so we can fold the value into
5226 a constant. But if more than one register is involved,
5227 this probably loses. */
5228 else if (REG_P (target) && TREE_STATIC (exp)
5229 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5231 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5235 /* If the constructor has fewer fields than the structure or
5236 if we are initializing the structure to mostly zeros, clear
5237 the whole structure first. Don't do this if TARGET is a
5238 register whose mode size isn't equal to SIZE since
5239 clear_storage can't handle this case. */
5241 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5242 != fields_length (type))
5243 || mostly_zeros_p (exp))
5245 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5248 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5252 if (REG_P (target) && !cleared)
5253 emit_clobber (target);
5255 /* Store each element of the constructor into the
5256 corresponding field of TARGET. */
5257 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5259 enum machine_mode mode;
5260 HOST_WIDE_INT bitsize;
5261 HOST_WIDE_INT bitpos = 0;
5263 rtx to_rtx = target;
5265 /* Just ignore missing fields. We cleared the whole
5266 structure, above, if any fields are missing. */
5270 if (cleared && initializer_zerop (value))
5273 if (host_integerp (DECL_SIZE (field), 1))
5274 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5278 mode = DECL_MODE (field);
5279 if (DECL_BIT_FIELD (field))
5282 offset = DECL_FIELD_OFFSET (field);
5283 if (host_integerp (offset, 0)
5284 && host_integerp (bit_position (field), 0))
5286 bitpos = int_bit_position (field);
5290 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5294 enum machine_mode address_mode;
5298 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5299 make_tree (TREE_TYPE (exp),
5302 offset_rtx = expand_normal (offset);
5303 gcc_assert (MEM_P (to_rtx));
5306 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5307 if (GET_MODE (offset_rtx) != address_mode)
5308 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5310 to_rtx = offset_address (to_rtx, offset_rtx,
5311 highest_pow2_factor (offset));
5314 #ifdef WORD_REGISTER_OPERATIONS
5315 /* If this initializes a field that is smaller than a
5316 word, at the start of a word, try to widen it to a full
5317 word. This special case allows us to output C++ member
5318 function initializations in a form that the optimizers
5319 can understand. */
5321 && bitsize < BITS_PER_WORD
5322 && bitpos % BITS_PER_WORD == 0
5323 && GET_MODE_CLASS (mode) == MODE_INT
5324 && TREE_CODE (value) == INTEGER_CST
5326 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5328 tree type = TREE_TYPE (value);
5330 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5332 type = lang_hooks.types.type_for_size
5333 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5334 value = fold_convert (type, value);
5337 if (BYTES_BIG_ENDIAN)
5339 = fold_build2 (LSHIFT_EXPR, type, value,
5340 build_int_cst (type,
5341 BITS_PER_WORD - bitsize));
5342 bitsize = BITS_PER_WORD;
5347 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5348 && DECL_NONADDRESSABLE_P (field))
5350 to_rtx = copy_rtx (to_rtx);
5351 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5354 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5355 value, type, cleared,
5356 get_alias_set (TREE_TYPE (field)));
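/* Illustrative example of the WORD_REGISTER_OPERATIONS widening above:
   for

     struct { unsigned char tag; int rest; } s = { 7, n };

   the byte-sized constant 7 sits at the start of a word, so it can be
   widened (and shifted to the top of the word on big-endian targets)
   and stored as a full word, a form the RTL optimizers combine more
   easily than a byte store.  */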
5363 unsigned HOST_WIDE_INT i;
5366 tree elttype = TREE_TYPE (type);
5368 HOST_WIDE_INT minelt = 0;
5369 HOST_WIDE_INT maxelt = 0;
5371 domain = TYPE_DOMAIN (type);
5372 const_bounds_p = (TYPE_MIN_VALUE (domain)
5373 && TYPE_MAX_VALUE (domain)
5374 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5375 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5377 /* If we have constant bounds for the range of the type, get them. */
5380 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5381 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5384 /* If the constructor has fewer elements than the array, clear
5385 the whole array first. Similarly if this is a static
5386 constructor of a non-BLKmode object. */
5389 else if (REG_P (target) && TREE_STATIC (exp))
5393 unsigned HOST_WIDE_INT idx;
5395 HOST_WIDE_INT count = 0, zero_count = 0;
5396 need_to_clear = ! const_bounds_p;
5398 /* This loop is a more accurate version of the loop in
5399 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5400 is also needed to check for missing elements. */
5401 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5403 HOST_WIDE_INT this_node_count;
5408 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5410 tree lo_index = TREE_OPERAND (index, 0);
5411 tree hi_index = TREE_OPERAND (index, 1);
5413 if (! host_integerp (lo_index, 1)
5414 || ! host_integerp (hi_index, 1))
5420 this_node_count = (tree_low_cst (hi_index, 1)
5421 - tree_low_cst (lo_index, 1) + 1);
5424 this_node_count = 1;
5426 count += this_node_count;
5427 if (mostly_zeros_p (value))
5428 zero_count += this_node_count;
5431 /* Clear the entire array first if there are any missing
5432 elements, or if the incidence of zero elements is >=
5433 75%. */
5435 && (count < maxelt - minelt + 1
5436 || 4 * zero_count >= 3 * count))
5440 if (need_to_clear && size > 0)
5443 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5445 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5449 if (!cleared && REG_P (target))
5450 /* Inform later passes that the old value is dead. */
5451 emit_clobber (target);
5453 /* Store each element of the constructor into the
5454 corresponding element of TARGET, determined by counting the
5455 elements. */
5456 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5458 enum machine_mode mode;
5459 HOST_WIDE_INT bitsize;
5460 HOST_WIDE_INT bitpos;
5461 rtx xtarget = target;
5463 if (cleared && initializer_zerop (value))
5466 mode = TYPE_MODE (elttype);
5467 if (mode == BLKmode)
5468 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5469 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5472 bitsize = GET_MODE_BITSIZE (mode);
5474 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5476 tree lo_index = TREE_OPERAND (index, 0);
5477 tree hi_index = TREE_OPERAND (index, 1);
5478 rtx index_r, pos_rtx;
5479 HOST_WIDE_INT lo, hi, count;
5482 /* If the range is constant and "small", unroll the loop. */
5484 && host_integerp (lo_index, 0)
5485 && host_integerp (hi_index, 0)
5486 && (lo = tree_low_cst (lo_index, 0),
5487 hi = tree_low_cst (hi_index, 0),
5488 count = hi - lo + 1,
5491 || (host_integerp (TYPE_SIZE (elttype), 1)
5492 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5495 lo -= minelt; hi -= minelt;
5496 for (; lo <= hi; lo++)
5498 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5501 && !MEM_KEEP_ALIAS_SET_P (target)
5502 && TREE_CODE (type) == ARRAY_TYPE
5503 && TYPE_NONALIASED_COMPONENT (type))
5505 target = copy_rtx (target);
5506 MEM_KEEP_ALIAS_SET_P (target) = 1;
5509 store_constructor_field
5510 (target, bitsize, bitpos, mode, value, type, cleared,
5511 get_alias_set (elttype));
5516 rtx loop_start = gen_label_rtx ();
5517 rtx loop_end = gen_label_rtx ();
5520 expand_normal (hi_index);
5522 index = build_decl (EXPR_LOCATION (exp),
5523 VAR_DECL, NULL_TREE, domain);
5524 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5525 SET_DECL_RTL (index, index_r);
5526 store_expr (lo_index, index_r, 0, false);
5528 /* Build the head of the loop. */
5529 do_pending_stack_adjust ();
5530 emit_label (loop_start);
5532 /* Assign value to element index. */
5534 fold_convert (ssizetype,
5535 fold_build2 (MINUS_EXPR,
5538 TYPE_MIN_VALUE (domain)));
5541 size_binop (MULT_EXPR, position,
5542 fold_convert (ssizetype,
5543 TYPE_SIZE_UNIT (elttype)));
5545 pos_rtx = expand_normal (position);
5546 xtarget = offset_address (target, pos_rtx,
5547 highest_pow2_factor (position));
5548 xtarget = adjust_address (xtarget, mode, 0);
5549 if (TREE_CODE (value) == CONSTRUCTOR)
5550 store_constructor (value, xtarget, cleared,
5551 bitsize / BITS_PER_UNIT);
5553 store_expr (value, xtarget, 0, false);
5555 /* Generate a conditional jump to exit the loop. */
5556 exit_cond = build2 (LT_EXPR, integer_type_node,
5558 jumpif (exit_cond, loop_end, -1);
5560 /* Update the loop counter, and jump to the head of
5561 the loop. */
5562 expand_assignment (index,
5563 build2 (PLUS_EXPR, TREE_TYPE (index),
5564 index, integer_one_node),
5567 emit_jump (loop_start);
5569 /* Build the end of the loop. */
5570 emit_label (loop_end);
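/* Illustrative: a GNU C range initializer with a non-constant value,
   such as

     int a[100] = { [0 ... 99] = n };

   is too large to unroll, so the code above emits a real loop: INDEX
   runs from the low bound to the high bound and VALUE is stored at
   each element in turn.  */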
5573 else if ((index != 0 && ! host_integerp (index, 0))
5574 || ! host_integerp (TYPE_SIZE (elttype), 1))
5579 index = ssize_int (1);
5582 index = fold_convert (ssizetype,
5583 fold_build2 (MINUS_EXPR,
5586 TYPE_MIN_VALUE (domain)));
5589 size_binop (MULT_EXPR, index,
5590 fold_convert (ssizetype,
5591 TYPE_SIZE_UNIT (elttype)));
5592 xtarget = offset_address (target,
5593 expand_normal (position),
5594 highest_pow2_factor (position));
5595 xtarget = adjust_address (xtarget, mode, 0);
5596 store_expr (value, xtarget, 0, false);
5601 bitpos = ((tree_low_cst (index, 0) - minelt)
5602 * tree_low_cst (TYPE_SIZE (elttype), 1));
5604 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5606 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5607 && TREE_CODE (type) == ARRAY_TYPE
5608 && TYPE_NONALIASED_COMPONENT (type))
5610 target = copy_rtx (target);
5611 MEM_KEEP_ALIAS_SET_P (target) = 1;
5613 store_constructor_field (target, bitsize, bitpos, mode, value,
5614 type, cleared, get_alias_set (elttype));
5622 unsigned HOST_WIDE_INT idx;
5623 constructor_elt *ce;
5627 tree elttype = TREE_TYPE (type);
5628 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5629 enum machine_mode eltmode = TYPE_MODE (elttype);
5630 HOST_WIDE_INT bitsize;
5631 HOST_WIDE_INT bitpos;
5632 rtvec vector = NULL;
5634 alias_set_type alias;
5636 gcc_assert (eltmode != BLKmode);
5638 n_elts = TYPE_VECTOR_SUBPARTS (type);
5639 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5641 enum machine_mode mode = GET_MODE (target);
5643 icode = (int) optab_handler (vec_init_optab, mode);
5644 if (icode != CODE_FOR_nothing)
5648 vector = rtvec_alloc (n_elts);
5649 for (i = 0; i < n_elts; i++)
5650 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5654 /* If the constructor has fewer elements than the vector,
5655 clear the whole vector first. Similarly if this is a static
5656 constructor of a non-BLKmode object. */
5659 else if (REG_P (target) && TREE_STATIC (exp))
5663 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5666 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5668 int n_elts_here = tree_low_cst
5669 (int_const_binop (TRUNC_DIV_EXPR,
5670 TYPE_SIZE (TREE_TYPE (value)),
5671 TYPE_SIZE (elttype)), 1);
5673 count += n_elts_here;
5674 if (mostly_zeros_p (value))
5675 zero_count += n_elts_here;
5678 /* Clear the entire vector first if there are any missing elements,
5679 or if the incidence of zero elements is >= 75%. */
5680 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5683 if (need_to_clear && size > 0 && !vector)
5686 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5688 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5692 /* Inform later passes that the old value is dead. */
5693 if (!cleared && !vector && REG_P (target))
5694 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5697 alias = MEM_ALIAS_SET (target);
5699 alias = get_alias_set (elttype);
5701 /* Store each element of the constructor into the corresponding
5702 element of TARGET, determined by counting the elements. */
5703 for (idx = 0, i = 0;
5704 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5705 idx++, i += bitsize / elt_size)
5707 HOST_WIDE_INT eltpos;
5708 tree value = ce->value;
5710 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5711 if (cleared && initializer_zerop (value))
5715 eltpos = tree_low_cst (ce->index, 1);
5721 /* Vector CONSTRUCTORs should only be built from smaller
5722 vectors in the case of BLKmode vectors. */
5723 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5724 RTVEC_ELT (vector, eltpos)
5725 = expand_normal (value);
5729 enum machine_mode value_mode =
5730 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5731 ? TYPE_MODE (TREE_TYPE (value))
5733 bitpos = eltpos * elt_size;
5734 store_constructor_field (target, bitsize, bitpos,
5735 value_mode, value, type,
5741 emit_insn (GEN_FCN (icode)
5743 gen_rtx_PARALLEL (GET_MODE (target), vector)));
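/* Illustrative: for a generic vector initializer such as

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v = { a, b, c, d };

   with V living in a vector register, the four elements are gathered
   into VECTOR and emitted through the target's vec_init pattern as a
   single insn instead of four element stores.  */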
5752 /* Store the value of EXP (an expression tree)
5753 into a subfield of TARGET which has mode MODE and occupies
5754 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5755 If MODE is VOIDmode, it means that we are storing into a bit-field.
5757 Always return const0_rtx unless we have something particular to
5758 return.
5760 TYPE is the type of the underlying object,
5762 ALIAS_SET is the alias set for the destination. This value will
5763 (in general) be different from that for TARGET, since TARGET is a
5764 reference to the containing structure.
5766 If NONTEMPORAL is true, try generating a nontemporal store. */
5769 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5770 enum machine_mode mode, tree exp, tree type,
5771 alias_set_type alias_set, bool nontemporal)
5773 if (TREE_CODE (exp) == ERROR_MARK)
5776 /* If we have nothing to store, do nothing unless the expression has
5777 side-effects. */
5779 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5781 /* If we are storing into an unaligned field of an aligned union that is
5782 in a register, we may have the mode of TARGET being an integer mode but
5783 MODE == BLKmode. In that case, get an aligned object whose size and
5784 alignment are the same as TARGET and store TARGET into it (we can avoid
5785 the store if the field being stored is the entire width of TARGET). Then
5786 call ourselves recursively to store the field into a BLKmode version of
5787 that object. Finally, load from the object into TARGET. This is not
5788 very efficient in general, but should only be slightly more expensive
5789 than the otherwise-required unaligned accesses. Perhaps this can be
5790 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5791 twice, once with emit_move_insn and once via store_field. */
5794 && (REG_P (target) || GET_CODE (target) == SUBREG))
5796 rtx object = assign_temp (type, 0, 1, 1);
5797 rtx blk_object = adjust_address (object, BLKmode, 0);
5799 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5800 emit_move_insn (object, target);
5802 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5805 emit_move_insn (target, object);
5807 /* We want to return the BLKmode version of the data. */
5811 if (GET_CODE (target) == CONCAT)
5813 /* We're storing into a struct containing a single __complex. */
5815 gcc_assert (!bitpos);
5816 return store_expr (exp, target, 0, nontemporal);
5819 /* If the structure is in a register or if the component
5820 is a bit field, we cannot use addressing to access it.
5821 Use bit-field techniques or SUBREG to store in it. */
5823 if (mode == VOIDmode
5824 || (mode != BLKmode && ! direct_store[(int) mode]
5825 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5826 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5828 || GET_CODE (target) == SUBREG
5829 /* If the field isn't aligned enough to store as an ordinary memref,
5830 store it as a bit field. */
5832 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5833 || bitpos % GET_MODE_ALIGNMENT (mode))
5834 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5835 || (bitpos % BITS_PER_UNIT != 0)))
5836 /* If the RHS and field are a constant size and the size of the
5837 RHS isn't the same size as the bitfield, we must use bitfield
5838 operations. */
5840 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5841 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5842 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5843 decl we must use bitfield operations. */
5845 && TREE_CODE (exp) == MEM_REF
5846 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5847 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5848 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5849 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5854 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5855 implies a mask operation. If the precision is the same size as
5856 the field we're storing into, that mask is redundant. This is
5857 particularly common with bit field assignments generated by the
5858 C++ front end. */
5859 nop_def = get_def_for_expr (exp, NOP_EXPR);
5862 tree type = TREE_TYPE (exp);
5863 if (INTEGRAL_TYPE_P (type)
5864 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5865 && bitsize == TYPE_PRECISION (type))
5867 tree op = gimple_assign_rhs1 (nop_def);
5868 type = TREE_TYPE (op);
5869 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5874 temp = expand_normal (exp);
5876 /* If BITSIZE is narrower than the size of the type of EXP
5877 we will be narrowing TEMP. Normally, what's wanted are the
5878 low-order bits. However, if EXP's type is a record and this is
5879 big-endian machine, we want the upper BITSIZE bits. */
5880 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5881 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5882 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5883 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5884 GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
5887 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5888 MODE. */
5889 if (mode != VOIDmode && mode != BLKmode
5890 && mode != TYPE_MODE (TREE_TYPE (exp)))
5891 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5893 /* If the modes of TEMP and TARGET are both BLKmode, both
5894 must be in memory and BITPOS must be aligned on a byte
5895 boundary. If so, we simply do a block copy. Likewise
5896 for a BLKmode-like TARGET. */
5897 if (GET_MODE (temp) == BLKmode
5898 && (GET_MODE (target) == BLKmode
5900 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5901 && (bitpos % BITS_PER_UNIT) == 0
5902 && (bitsize % BITS_PER_UNIT) == 0)))
5904 gcc_assert (MEM_P (target) && MEM_P (temp)
5905 && (bitpos % BITS_PER_UNIT) == 0);
5907 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5908 emit_block_move (target, temp,
5909 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5916 /* Store the value in the bitfield. */
5917 store_bit_field (target, bitsize, bitpos, mode, temp);
5923 /* Now build a reference to just the desired component. */
5924 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5926 if (to_rtx == target)
5927 to_rtx = copy_rtx (to_rtx);
5929 if (!MEM_SCALAR_P (to_rtx))
5930 MEM_IN_STRUCT_P (to_rtx) = 1;
5931 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5932 set_mem_alias_set (to_rtx, alias_set);
5934 return store_expr (exp, to_rtx, 0, nontemporal);
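/* Illustrative: a bit-field store such as

     struct S { unsigned a : 3; unsigned b : 5; } s;
     void f (unsigned v) { s.b = v; }

   reaches store_field with BITSIZE == 5, BITPOS == 3 (on a typical
   little-endian layout) and MODE == VOIDmode, which forces the
   store_bit_field path above.  */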
5938 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5939 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5940 codes and find the ultimate containing object, which we return.
5942 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5943 bit position, and *PUNSIGNEDP to the signedness of the field.
5944 If the position of the field is variable, we store a tree
5945 giving the variable offset (in units) in *POFFSET.
5946 This offset is in addition to the bit position.
5947 If the position is not variable, we store 0 in *POFFSET.
5949 If any of the extraction expressions is volatile,
5950 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5952 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5953 Otherwise, it is a mode that can be used to access the field.
5955 If the field describes a variable-sized object, *PMODE is set to
5956 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5957 this case, but the address of the object can be found.
5959 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5960 look through nodes that serve as markers of a greater alignment than
5961 the one that can be deduced from the expression. These nodes make it
5962 possible for front-ends to prevent temporaries from being created by
5963 the middle-end on alignment considerations. For that purpose, the
5964 normal operating mode at high-level is to always pass FALSE so that
5965 the ultimate containing object is really returned; moreover, the
5966 associated predicate handled_component_p will always return TRUE
5967 on these nodes, thus indicating that they are essentially handled
5968 by get_inner_reference. TRUE should only be passed when the caller
5969 is scanning the expression in order to build another representation
5970 and specifically knows how to handle these nodes; as such, this is
5971 the normal operating mode in the RTL expanders. */
5974 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5975 HOST_WIDE_INT *pbitpos, tree *poffset,
5976 enum machine_mode *pmode, int *punsignedp,
5977 int *pvolatilep, bool keep_aligning)
5980 enum machine_mode mode = VOIDmode;
5981 bool blkmode_bitfield = false;
5982 tree offset = size_zero_node;
5983 double_int bit_offset = double_int_zero;
5985 /* First get the mode, signedness, and size. We do this from just the
5986 outermost expression. */
5988 if (TREE_CODE (exp) == COMPONENT_REF)
5990 tree field = TREE_OPERAND (exp, 1);
5991 size_tree = DECL_SIZE (field);
5992 if (!DECL_BIT_FIELD (field))
5993 mode = DECL_MODE (field);
5994 else if (DECL_MODE (field) == BLKmode)
5995 blkmode_bitfield = true;
5996 else if (TREE_THIS_VOLATILE (exp)
5997 && flag_strict_volatile_bitfields > 0)
5998 /* Volatile bitfields should be accessed in the mode of the
5999 field's type, not the mode computed based on the bit
6000 size. */
6001 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6003 *punsignedp = DECL_UNSIGNED (field);
6005 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6007 size_tree = TREE_OPERAND (exp, 1);
6008 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6009 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6011 /* For vector types, with the correct size of access, use the mode of
6012 the inner type. */
6013 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6014 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6015 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6016 mode = TYPE_MODE (TREE_TYPE (exp));
6020 mode = TYPE_MODE (TREE_TYPE (exp));
6021 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6023 if (mode == BLKmode)
6024 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6026 *pbitsize = GET_MODE_BITSIZE (mode);
6031 if (! host_integerp (size_tree, 1))
6032 mode = BLKmode, *pbitsize = -1;
6034 *pbitsize = tree_low_cst (size_tree, 1);
6037 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6038 and find the ultimate containing object. */
6041 switch (TREE_CODE (exp))
6045 = double_int_add (bit_offset,
6046 tree_to_double_int (TREE_OPERAND (exp, 2)));
6051 tree field = TREE_OPERAND (exp, 1);
6052 tree this_offset = component_ref_field_offset (exp);
6054 /* If this field hasn't been filled in yet, don't go past it.
6055 This should only happen when folding expressions made during
6056 type construction. */
6057 if (this_offset == 0)
6060 offset = size_binop (PLUS_EXPR, offset, this_offset);
6061 bit_offset = double_int_add (bit_offset,
6063 (DECL_FIELD_BIT_OFFSET (field)));
6065 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6070 case ARRAY_RANGE_REF:
6072 tree index = TREE_OPERAND (exp, 1);
6073 tree low_bound = array_ref_low_bound (exp);
6074 tree unit_size = array_ref_element_size (exp);
6076 /* We assume all arrays have sizes that are a multiple of a byte.
6077 First subtract the lower bound, if any, in the type of the
6078 index, then convert to sizetype and multiply by the size of
6079 the array element. */
6080 if (! integer_zerop (low_bound))
6081 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6084 offset = size_binop (PLUS_EXPR, offset,
6085 size_binop (MULT_EXPR,
6086 fold_convert (sizetype, index),
6095 bit_offset = double_int_add (bit_offset,
6096 uhwi_to_double_int (*pbitsize));
6099 case VIEW_CONVERT_EXPR:
6100 if (keep_aligning && STRICT_ALIGNMENT
6101 && (TYPE_ALIGN (TREE_TYPE (exp))
6102 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6103 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6104 < BIGGEST_ALIGNMENT)
6105 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6106 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6111 /* Hand back the decl for MEM[&decl, off]. */
6112 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6114 tree off = TREE_OPERAND (exp, 1);
6115 if (!integer_zerop (off))
6117 double_int boff, coff = mem_ref_offset (exp);
6118 boff = double_int_lshift (coff,
6120 ? 3 : exact_log2 (BITS_PER_UNIT),
6121 HOST_BITS_PER_DOUBLE_INT, true);
6122 bit_offset = double_int_add (bit_offset, boff);
6124 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6132 /* If any reference in the chain is volatile, the effect is volatile. */
6133 if (TREE_THIS_VOLATILE (exp))
6136 exp = TREE_OPERAND (exp, 0);
6140 /* If OFFSET is constant, see if we can return the whole thing as a
6141 constant bit position. Make sure to handle overflow during
6142 this conversion. */
6143 if (host_integerp (offset, 0))
6145 double_int tem = double_int_lshift (tree_to_double_int (offset),
6147 ? 3 : exact_log2 (BITS_PER_UNIT),
6148 HOST_BITS_PER_DOUBLE_INT, true);
6149 tem = double_int_add (tem, bit_offset);
6150 if (double_int_fits_in_shwi_p (tem))
6152 *pbitpos = double_int_to_shwi (tem);
6153 *poffset = offset = NULL_TREE;
6157 /* Otherwise, split it up. */
6160 *pbitpos = double_int_to_shwi (bit_offset);
6164 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6165 if (mode == VOIDmode
6167 && (*pbitpos % BITS_PER_UNIT) == 0
6168 && (*pbitsize % BITS_PER_UNIT) == 0)
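/* Worked example (illustrative, assuming 4-byte int): for

     struct S { int i; char c[10]; } *p;
     ... p->c[k] ...

   get_inner_reference peels the ARRAY_REF and COMPONENT_REF, returns
   the dereference of P as the ultimate containing object, and sets
   *PBITSIZE == 8, *PBITPOS == 32 (the byte offset of C scaled to
   bits) and *POFFSET to the variable part "(sizetype) k".  */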
6176 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6177 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6178 EXP are marked as PACKED. */
6181 contains_packed_reference (const_tree exp)
6183 bool packed_p = false;
6187 switch (TREE_CODE (exp))
6191 tree field = TREE_OPERAND (exp, 1);
6192 packed_p = DECL_PACKED (field)
6193 || TYPE_PACKED (TREE_TYPE (field))
6194 || TYPE_PACKED (TREE_TYPE (exp));
6202 case ARRAY_RANGE_REF:
6205 case VIEW_CONVERT_EXPR:
6211 exp = TREE_OPERAND (exp, 0);
6217 /* Return a tree of sizetype representing the size, in bytes, of the element
6218 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6221 array_ref_element_size (tree exp)
6223 tree aligned_size = TREE_OPERAND (exp, 3);
6224 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6225 location_t loc = EXPR_LOCATION (exp);
6227 /* If a size was specified in the ARRAY_REF, it's the size measured
6228 in alignment units of the element type. So multiply by that value. */
6231 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6232 sizetype from another type of the same width and signedness. */
6233 if (TREE_TYPE (aligned_size) != sizetype)
6234 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6235 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6236 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6239 /* Otherwise, take the size from that of the element type. Substitute
6240 any PLACEHOLDER_EXPR that we have. */
6242 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6245 /* Return a tree representing the lower bound of the array mentioned in
6246 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6248 tree
6249 array_ref_low_bound (tree exp)
6250 {
6251 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6253 /* If a lower bound is specified in EXP, use it. */
6254 if (TREE_OPERAND (exp, 2))
6255 return TREE_OPERAND (exp, 2);
6257 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6258 substituting for a PLACEHOLDER_EXPR as needed. */
6259 if (domain_type && TYPE_MIN_VALUE (domain_type))
6260 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6262 /* Otherwise, return a zero of the appropriate type. */
6263 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6264 }
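/* Illustrative precedence: for C's "int a[5]" the domain minimum gives
   0; for a language with non-zero bases (say, a Fortran-style
   dimension(2:6)) the same path yields 2; the literal zero above is only
   a last resort when no domain information exists.  */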
6266 /* Return a tree representing the upper bound of the array mentioned in
6267 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6269 tree
6270 array_ref_up_bound (tree exp)
6271 {
6272 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6274 /* If there is a domain type and it has an upper bound, use it, substituting
6275 for a PLACEHOLDER_EXPR as needed. */
6276 if (domain_type && TYPE_MAX_VALUE (domain_type))
6277 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6279 /* Otherwise fail. */
6280 return NULL_TREE;
6281 }
6283 /* Return a tree representing the offset, in bytes, of the field referenced
6284 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6286 tree
6287 component_ref_field_offset (tree exp)
6288 {
6289 tree aligned_offset = TREE_OPERAND (exp, 2);
6290 tree field = TREE_OPERAND (exp, 1);
6291 location_t loc = EXPR_LOCATION (exp);
6293 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6294 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6295 value. */
6296 if (aligned_offset)
6297 {
6298 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6299 sizetype from another type of the same width and signedness. */
6300 if (TREE_TYPE (aligned_offset) != sizetype)
6301 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6302 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6303 size_int (DECL_OFFSET_ALIGN (field)
6304 / BITS_PER_UNIT));
6305 }
6307 /* Otherwise, take the offset from that of the field. Substitute
6308 any PLACEHOLDER_EXPR that we have. */
6310 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6311 }
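/* Illustrative only: a field with DECL_OFFSET_ALIGN = 32 whose
   COMPONENT_REF carries operand 2 == 2 denotes an offset of
   2 * (32 / BITS_PER_UNIT) = 8 bytes, the same value the plain
   DECL_FIELD_OFFSET path returns when no operand 2 is given.  */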
6313 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6315 static unsigned HOST_WIDE_INT
6316 target_align (const_tree target)
6317 {
6318 /* We might have a chain of nested references with intermediate
6319 misaligning bitfield components, so we need to recurse to find out. */
6321 unsigned HOST_WIDE_INT this_align, outer_align;
6323 switch (TREE_CODE (target))
6324 {
6325 case BIT_FIELD_REF:
6326 return 1;
6328 case COMPONENT_REF:
6329 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6330 outer_align = target_align (TREE_OPERAND (target, 0));
6331 return MIN (this_align, outer_align);
6333 case ARRAY_REF:
6334 case ARRAY_RANGE_REF:
6335 this_align = TYPE_ALIGN (TREE_TYPE (target));
6336 outer_align = target_align (TREE_OPERAND (target, 0));
6337 return MIN (this_align, outer_align);
6339 CASE_CONVERT:
6340 case NON_LVALUE_EXPR:
6341 case VIEW_CONVERT_EXPR:
6342 this_align = TYPE_ALIGN (TREE_TYPE (target));
6343 outer_align = target_align (TREE_OPERAND (target, 0));
6344 return MAX (this_align, outer_align);
6346 default:
6347 return TYPE_ALIGN (TREE_TYPE (target));
6348 }
6349 }
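/* Illustrative example (hypothetical layout): for a store to x.a.b where
   field b is 2-byte aligned inside an 8-byte aligned object x, the
   COMPONENT_REF chain above yields MIN (16, 64) = 16 bits, so callers
   never assume more alignment than the worst member in the chain.  */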
6352 /* Given an rtx VALUE that may contain additions and multiplications, return
6353 an equivalent value that just refers to a register, memory, or constant.
6354 This is done by generating instructions to perform the arithmetic and
6355 returning a pseudo-register containing the value.
6357 The returned value may be a REG, SUBREG, MEM or constant. */
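/* Example (illustrative, not from the sources): called with
   VALUE = (plus:SI (reg:SI 60) (const_int 4)) and TARGET = 0, the
   ARITHMETIC_P path below emits the addition via expand_simple_binop and
   returns a pseudo register holding the sum, so the caller gets a
   general operand instead of an arithmetic expression.  */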
6359 rtx
6360 force_operand (rtx value, rtx target)
6361 {
6362 rtx op1, op2;
6363 /* Use subtarget as the target for operand 0 of a binary operation. */
6364 rtx subtarget = get_subtarget (target);
6365 enum rtx_code code = GET_CODE (value);
6367 /* Check for subreg applied to an expression produced by loop optimizer. */
6368 if (code == SUBREG
6369 && !REG_P (SUBREG_REG (value))
6370 && !MEM_P (SUBREG_REG (value)))
6371 {
6372 value
6373 = simplify_gen_subreg (GET_MODE (value),
6374 force_reg (GET_MODE (SUBREG_REG (value)),
6375 force_operand (SUBREG_REG (value),
6376 NULL_RTX)),
6377 GET_MODE (SUBREG_REG (value)),
6378 SUBREG_BYTE (value));
6379 code = GET_CODE (value);
6380 }
6382 /* Check for a PIC address load. */
6383 if ((code == PLUS || code == MINUS)
6384 && XEXP (value, 0) == pic_offset_table_rtx
6385 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6386 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6387 || GET_CODE (XEXP (value, 1)) == CONST))
6388 {
6389 if (!subtarget)
6390 subtarget = gen_reg_rtx (GET_MODE (value));
6391 emit_move_insn (subtarget, value);
6392 return subtarget;
6393 }
6395 if (ARITHMETIC_P (value))
6396 {
6397 op2 = XEXP (value, 1);
6398 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6399 subtarget = 0;
6400 if (code == MINUS && CONST_INT_P (op2))
6401 {
6402 code = PLUS;
6403 op2 = negate_rtx (GET_MODE (value), op2);
6404 }
6406 /* Check for an addition with OP2 a constant integer and our first
6407 operand a PLUS of a virtual register and something else. In that
6408 case, we want to emit the sum of the virtual register and the
6409 constant first and then add the other value. This allows virtual
6410 register instantiation to simply modify the constant rather than
6411 creating another one around this addition. */
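/* Sketch of the shape being matched (hypothetical RTL): VALUE =
   (plus (plus (reg virtual-stack-vars) (const_int 8)) (reg 70)).
   Emitting virtual-stack-vars + 8 first lets virtual register
   instantiation simply adjust the constant 8 during elimination rather
   than allocating another add around this addition.  */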
6412 if (code == PLUS && CONST_INT_P (op2)
6413 && GET_CODE (XEXP (value, 0)) == PLUS
6414 && REG_P (XEXP (XEXP (value, 0), 0))
6415 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6416 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6417 {
6418 rtx temp = expand_simple_binop (GET_MODE (value), code,
6419 XEXP (XEXP (value, 0), 0), op2,
6420 subtarget, 0, OPTAB_LIB_WIDEN);
6421 return expand_simple_binop (GET_MODE (value), code, temp,
6422 force_operand (XEXP (XEXP (value,
6423 0), 1), NULL_RTX),
6424 target, 0, OPTAB_LIB_WIDEN);
6425 }
6427 op1 = force_operand (XEXP (value, 0), subtarget);
6428 op2 = force_operand (op2, NULL_RTX);
6429 switch (code)
6430 {
6431 case MULT:
6432 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6433 case DIV:
6434 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6435 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6436 target, 1, OPTAB_LIB_WIDEN);
6437 else
6438 return expand_divmod (0,
6439 FLOAT_MODE_P (GET_MODE (value))
6440 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6441 GET_MODE (value), op1, op2, target, 0);
6442 case MOD:
6443 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6444 target, 0);
6445 case UDIV:
6446 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6447 target, 1);
6448 case UMOD:
6449 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6450 target, 1);
6451 case ASHIFTRT:
6452 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6453 target, 0, OPTAB_LIB_WIDEN);
6454 default:
6455 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6456 target, 1, OPTAB_LIB_WIDEN);
6457 }
6458 }
6459 if (UNARY_P (value))
6460 {
6461 if (!target)
6462 target = gen_reg_rtx (GET_MODE (value));
6463 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6464 switch (code)
6465 {
6466 case ZERO_EXTEND: case SIGN_EXTEND: case TRUNCATE:
6467 case FLOAT_EXTEND: case FLOAT_TRUNCATE:
6471 convert_move (target, op1, code == ZERO_EXTEND);
6472 return target;
6475 case FIX: case UNSIGNED_FIX:
6476 expand_fix (target, op1, code == UNSIGNED_FIX);
6477 return target;
6480 case FLOAT: case UNSIGNED_FLOAT:
6481 expand_float (target, op1, code == UNSIGNED_FLOAT);
6482 return target;
6484 default:
6485 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6486 }
6487 }
6489 #ifdef INSN_SCHEDULING
6490 /* On machines that have insn scheduling, we want all memory references to be
6491 explicit, so we need to deal with such paradoxical SUBREGs. */
6492 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6493 && (GET_MODE_SIZE (GET_MODE (value))
6494 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6495 value
6496 = simplify_gen_subreg (GET_MODE (value),
6497 force_reg (GET_MODE (SUBREG_REG (value)),
6498 force_operand (SUBREG_REG (value),
6499 NULL_RTX)),
6500 GET_MODE (SUBREG_REG (value)),
6501 SUBREG_BYTE (value));
6502 #endif
6504 return value;
6505 }
6507 /* Subroutine of expand_expr: return nonzero iff there is no way that
6508 EXP can reference X, which is being modified. TOP_P is nonzero if this
6509 call is going to be used to determine whether we need a temporary
6510 for EXP, as opposed to a recursive call to this function.
6512 It is always safe for this routine to return zero since it merely
6513 searches for optimization opportunities. */
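/* Usage sketch (illustrative): expand_operands below asks
   safe_from_p (target, exp1, 1) before letting operand 0 be computed
   directly into TARGET; a zero answer merely costs an extra temporary,
   never correctness, which is why conservative failure is fine here.  */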
6515 static int
6516 safe_from_p (const_rtx x, tree exp, int top_p)
6517 {
6518 rtx exp_rtl = 0;
6519 int i, nops;
6521 if (x == 0
6522 /* If EXP has varying size, we MUST use a target since we currently
6523 have no way of allocating temporaries of variable size
6524 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6525 So we assume here that something at a higher level has prevented a
6526 clash. This is somewhat bogus, but the best we can do. Only
6527 do this when X is BLKmode and when we are at the top level. */
6528 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6529 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6530 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6531 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6532 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6533 != INTEGER_CST)
6534 && GET_MODE (x) == BLKmode)
6535 /* If X is in the outgoing argument area, it is always safe. */
6536 || (MEM_P (x)
6537 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6538 || (GET_CODE (XEXP (x, 0)) == PLUS
6539 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6540 return 1;
6542 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6543 find the underlying pseudo. */
6544 if (GET_CODE (x) == SUBREG)
6545 {
6546 x = SUBREG_REG (x);
6547 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6548 return 0;
6549 }
6551 /* Now look at our tree code and possibly recurse. */
6552 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6554 case tcc_declaration:
6555 exp_rtl = DECL_RTL_IF_SET (exp);
6556 break;
6561 case tcc_exceptional:
6562 if (TREE_CODE (exp) == TREE_LIST)
6563 {
6564 while (1)
6565 {
6566 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6567 return 0;
6568 exp = TREE_CHAIN (exp);
6569 if (!exp)
6570 return 1;
6571 if (TREE_CODE (exp) != TREE_LIST)
6572 return safe_from_p (x, exp, 0);
6573 }
6574 }
6575 else if (TREE_CODE (exp) == CONSTRUCTOR)
6576 {
6577 constructor_elt *ce;
6578 unsigned HOST_WIDE_INT idx;
6580 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
6581 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6582 || !safe_from_p (x, ce->value, 0))
6583 return 0;
6584 return 1;
6585 }
6586 else if (TREE_CODE (exp) == ERROR_MARK)
6587 return 1; /* An already-visited SAVE_EXPR? */
6588 else
6589 return 0;
6591 case tcc_statement:
6592 /* The only case we look at here is the DECL_INITIAL inside a
6593 DECL_EXPR. */
6594 return (TREE_CODE (exp) != DECL_EXPR
6595 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6596 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6597 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6599 case tcc_binary:
6600 case tcc_comparison:
6601 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6602 return 0;
6603 /* Fall through. */
6605 case tcc_unary:
6606 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6608 case tcc_expression:
6609 case tcc_reference:
6610 case tcc_vl_exp:
6611 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6612 the expression. If it is set, we conflict iff we are that rtx or
6613 both are in memory. Otherwise, we check all operands of the
6614 expression recursively. */
6616 switch (TREE_CODE (exp))
6617 {
6618 case ADDR_EXPR:
6619 /* If the operand is static or we are static, we can't conflict.
6620 Likewise if we don't conflict with the operand at all. */
6621 if (staticp (TREE_OPERAND (exp, 0))
6622 || TREE_STATIC (exp)
6623 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6624 return 1;
6626 /* Otherwise, the only way this can conflict is if we are taking
6627 the address of a DECL whose address is part of X, which is
6628 very rare. */
6629 exp = TREE_OPERAND (exp, 0);
6630 if (DECL_P (exp))
6631 {
6632 if (!DECL_RTL_SET_P (exp)
6633 || !MEM_P (DECL_RTL (exp)))
6634 return 0;
6635 else
6636 exp_rtl = XEXP (DECL_RTL (exp), 0);
6637 }
6638 break;
6642 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6643 get_alias_set (exp)))
6647 case CALL_EXPR:
6648 /* Assume that the call will clobber all hard registers and
6649 all of memory. */
6650 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6651 || MEM_P (x))
6652 return 0;
6653 break;
6655 case WITH_CLEANUP_EXPR:
6656 case CLEANUP_POINT_EXPR:
6657 /* Lowered by gimplify.c. */
6658 gcc_unreachable ();
6660 case SAVE_EXPR:
6661 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6663 default:
6664 break;
6665 }
6667 /* If we have an rtx, we do not need to scan our operands. */
6668 if (exp_rtl)
6669 break;
6671 nops = TREE_OPERAND_LENGTH (exp);
6672 for (i = 0; i < nops; i++)
6673 if (TREE_OPERAND (exp, i) != 0
6674 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6675 return 0;
6676 break;
6679 case tcc_type:
6680 /* Should never get a type here. */
6681 gcc_unreachable ();
6682 }
6684 /* If we have an rtl, find any enclosed object. Then see if we conflict
6685 with it. */
6686 if (exp_rtl)
6687 {
6688 if (GET_CODE (exp_rtl) == SUBREG)
6689 {
6690 exp_rtl = SUBREG_REG (exp_rtl);
6691 if (REG_P (exp_rtl)
6692 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6693 return 0;
6694 }
6696 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6697 are memory and they conflict. */
6698 return ! (rtx_equal_p (x, exp_rtl)
6699 || (MEM_P (x) && MEM_P (exp_rtl)
6700 && true_dependence (exp_rtl, VOIDmode, x,
6701 rtx_addr_varies_p)));
6702 }
6704 /* If we reach here, it is safe. */
6705 return 1;
6706 }
6709 /* Return the highest power of two that EXP is known to be a multiple of.
6710 This is used in updating alignment of MEMs in array references. */
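/* Worked example (illustrative): for EXP = i * 12, the MULT_EXPR case
   below combines highest_pow2_factor (i) == 1 with the constant's lowest
   set bit (12 & -12 == 4), giving 4; a MEM addressed by this offset can
   therefore be marked at least 4-byte aligned.  */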
6712 unsigned HOST_WIDE_INT
6713 highest_pow2_factor (const_tree exp)
6714 {
6715 unsigned HOST_WIDE_INT c0, c1;
6717 switch (TREE_CODE (exp))
6718 {
6719 case INTEGER_CST:
6720 /* We can find the lowest bit that's a one. If the low
6721 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6722 We need to handle this case since we can find it in a COND_EXPR,
6723 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6724 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6725 problems. */
6726 if (TREE_OVERFLOW (exp))
6727 return BIGGEST_ALIGNMENT;
6728 else
6729 {
6730 /* Note: tree_low_cst is intentionally not used here,
6731 we don't care about the upper bits. */
6732 c0 = TREE_INT_CST_LOW (exp);
6733 c0 &= -c0;
6734 return c0 ? c0 : BIGGEST_ALIGNMENT;
6735 }
6738 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6739 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6740 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6741 return MIN (c0, c1);
6743 case MULT_EXPR:
6744 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6745 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6746 return c0 * c1;
6748 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6749 case CEIL_DIV_EXPR:
6750 if (integer_pow2p (TREE_OPERAND (exp, 1))
6751 && host_integerp (TREE_OPERAND (exp, 1), 1))
6752 {
6753 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6754 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6755 return MAX (1, c0 / c1);
6756 }
6757 break;
6759 case BIT_AND_EXPR:
6760 /* The highest power of two of a bit-and expression is the maximum of
6761 that of its operands. We typically get here for a complex LHS and
6762 a constant negative power of two on the RHS to force an explicit
6763 alignment, so don't bother looking at the LHS. */
6764 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6766 CASE_CONVERT:
6767 case SAVE_EXPR:
6768 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6770 case COMPOUND_EXPR:
6771 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6773 case COND_EXPR:
6774 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6775 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6776 return MIN (c0, c1);
6778 default:
6779 break;
6780 }
6782 return 1;
6783 }
6785 /* Similar, except that the alignment requirements of TARGET are
6786 taken into account. Assume it is at least as aligned as its
6787 type, unless it is a COMPONENT_REF in which case the layout of
6788 the structure gives the alignment. */
6790 static unsigned HOST_WIDE_INT
6791 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6792 {
6793 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6794 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6796 return MAX (factor, talign);
6797 }
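/* E.g. (hypothetical numbers): a store of EXP with
   highest_pow2_factor == 4 into a target whose target_align is 64 bits
   gives MAX (4, 8) = 8 bytes; the destination's own layout can raise,
   but never lower, the deducible factor.  */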
6799 /* Subroutine of expand_expr. Expand the two operands of a binary
6800 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6801 The value may be stored in TARGET if TARGET is nonzero. The
6802 MODIFIER argument is as documented by expand_expr. */
6804 static void
6805 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6806 enum expand_modifier modifier)
6807 {
6808 if (! safe_from_p (target, exp1, 1))
6809 target = 0;
6810 if (operand_equal_p (exp0, exp1, 0))
6811 {
6812 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6813 *op1 = copy_rtx (*op0);
6814 }
6815 else
6816 {
6817 /* If we need to preserve evaluation order, copy exp0 into its own
6818 temporary variable so that it can't be clobbered by exp1. */
6819 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6820 exp0 = save_expr (exp0);
6821 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6822 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6823 }
6824 }
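/* Hypothetical caller sketch, assuming ordinary binary operands:

       rtx x0, x1;
       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                        subtarget, &x0, &x1, EXPAND_NORMAL);

   Identical trees are expanded once and duplicated via copy_rtx, and
   TARGET is dropped above whenever expanding EXP1 could clobber it.  */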
6827 /* Return a MEM that contains constant EXP. DEFER is as for
6828 output_constant_def and MODIFIER is as for expand_expr. */
6830 static rtx
6831 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6832 {
6833 rtx mem;
6835 mem = output_constant_def (exp, defer);
6836 if (modifier != EXPAND_INITIALIZER)
6837 mem = use_anchored_address (mem);
6838 return mem;
6839 }
6841 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6842 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6844 static rtx
6845 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6846 enum expand_modifier modifier, addr_space_t as)
6847 {
6848 rtx result, subtarget;
6849 tree inner, offset;
6850 HOST_WIDE_INT bitsize, bitpos;
6851 int volatilep, unsignedp;
6852 enum machine_mode mode1;
6854 /* If we are taking the address of a constant and are at the top level,
6855 we have to use output_constant_def since we can't call force_const_mem
6856 yet. */
6857 /* ??? This should be considered a front-end bug. We should not be
6858 generating ADDR_EXPR of something that isn't an LVALUE. The only
6859 exception here is STRING_CST. */
6860 if (CONSTANT_CLASS_P (exp))
6861 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6863 /* Everything must be something allowed by is_gimple_addressable. */
6864 switch (TREE_CODE (exp))
6867 /* This case will happen via recursion for &a->b. */
6868 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6870 case MEM_REF:
6871 {
6872 tree tem = TREE_OPERAND (exp, 0);
6873 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6874 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6875 tem,
6876 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6877 return expand_expr (tem, target, tmode, modifier);
6878 }
6880 case CONST_DECL:
6881 /* Expand the initializer like constants above. */
6882 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6884 case REALPART_EXPR:
6885 /* The real part of the complex number is always first, therefore
6886 the address is the same as the address of the parent object. */
6887 offset = 0;
6888 bitpos = 0;
6889 inner = TREE_OPERAND (exp, 0);
6890 break;
6892 case IMAGPART_EXPR:
6893 /* The imaginary part of the complex number is always second.
6894 The expression is therefore always offset by the size of the
6895 scalar type. */
6896 offset = 0;
6897 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6898 inner = TREE_OPERAND (exp, 0);
6899 break;
6901 default:
6902 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6903 expand_expr, as that can have various side effects; LABEL_DECLs for
6904 example, may not have their DECL_RTL set yet. Expand the rtl of
6905 CONSTRUCTORs too, which should yield a memory reference for the
6906 constructor's contents. Assume language specific tree nodes can
6907 be expanded in some interesting way. */
6908 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6909 if (DECL_P (exp)
6910 || TREE_CODE (exp) == CONSTRUCTOR
6911 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6912 {
6913 result = expand_expr (exp, target, tmode,
6914 modifier == EXPAND_INITIALIZER
6915 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6917 /* If the DECL isn't in memory, then the DECL wasn't properly
6918 marked TREE_ADDRESSABLE, which will be either a front-end
6919 or a tree optimizer bug. */
6920 gcc_assert (MEM_P (result));
6921 result = XEXP (result, 0);
6923 /* ??? Is this needed anymore? */
6924 if (DECL_P (exp) && TREE_USED (exp) == 0)
6925 {
6926 assemble_external (exp);
6927 TREE_USED (exp) = 1;
6928 }
6930 if (modifier != EXPAND_INITIALIZER
6931 && modifier != EXPAND_CONST_ADDRESS)
6932 result = force_operand (result, target);
6933 return result;
6934 }
6936 /* Pass FALSE as the last argument to get_inner_reference although
6937 we are expanding to RTL. The rationale is that we know how to
6938 handle "aligning nodes" here: we can just bypass them because
6939 they won't change the final object whose address will be returned
6940 (they actually exist only for that purpose). */
6941 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6942 &mode1, &unsignedp, &volatilep, false);
6943 break;
6944 }
6946 /* We must have made progress. */
6947 gcc_assert (inner != exp);
6949 subtarget = offset || bitpos ? NULL_RTX : target;
6950 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6951 inner alignment, force the inner to be sufficiently aligned. */
6952 if (CONSTANT_CLASS_P (inner)
6953 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6954 {
6955 inner = copy_node (inner);
6956 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6957 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6958 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6959 }
6960 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6962 if (offset)
6963 {
6964 rtx tmp;
6966 if (modifier != EXPAND_NORMAL)
6967 result = force_operand (result, NULL);
6968 tmp = expand_expr (offset, NULL_RTX, tmode,
6969 modifier == EXPAND_INITIALIZER
6970 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6972 result = convert_memory_address_addr_space (tmode, result, as);
6973 tmp = convert_memory_address_addr_space (tmode, tmp, as);
6975 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6976 result = simplify_gen_binary (PLUS, tmode, result, tmp);
6977 else
6978 {
6979 subtarget = bitpos ? NULL_RTX : target;
6980 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6981 1, OPTAB_LIB_WIDEN);
6982 }
6983 }
6985 if (bitpos)
6986 {
6987 /* Someone beforehand should have rejected taking the address
6988 of such an object. */
6989 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6991 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6992 if (modifier < EXPAND_SUM)
6993 result = force_operand (result, target);
6994 }
6996 return result;
6997 }
6999 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7000 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7002 static rtx
7003 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7004 enum expand_modifier modifier)
7005 {
7006 addr_space_t as = ADDR_SPACE_GENERIC;
7007 enum machine_mode address_mode = Pmode;
7008 enum machine_mode pointer_mode = ptr_mode;
7009 enum machine_mode rmode;
7010 rtx result;
7012 /* Target mode of VOIDmode says "whatever's natural". */
7013 if (tmode == VOIDmode)
7014 tmode = TYPE_MODE (TREE_TYPE (exp));
7016 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7017 {
7018 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7019 address_mode = targetm.addr_space.address_mode (as);
7020 pointer_mode = targetm.addr_space.pointer_mode (as);
7021 }
7023 /* We can get called with some Weird Things if the user does silliness
7024 like "(short) &a". In that case, convert_memory_address won't do
7025 the right thing, so ignore the given target mode. */
7026 if (tmode != address_mode && tmode != pointer_mode)
7027 tmode = address_mode;
7029 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7030 tmode, modifier, as);
7032 /* Despite expand_expr's claims about ignoring TMODE when it is not
7033 strictly convenient, things break if we don't honor it. Note
7034 that combined with the above, we only do this for pointer modes. */
7035 rmode = GET_MODE (result);
7036 if (rmode == VOIDmode)
7037 rmode = tmode;
7038 if (rmode != tmode)
7039 result = convert_memory_address_addr_space (tmode, result, as);
7041 return result;
7042 }
7044 /* Generate code for computing CONSTRUCTOR EXP.
7045 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7046 is TRUE, instead of creating a temporary variable in memory
7047 NULL is returned and the caller needs to handle it differently. */
7049 static rtx
7050 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7051 bool avoid_temp_mem)
7052 {
7053 tree type = TREE_TYPE (exp);
7054 enum machine_mode mode = TYPE_MODE (type);
7056 /* Try to avoid creating a temporary at all. This is possible
7057 if all of the initializer is zero.
7058 FIXME: try to handle all [0..255] initializers we can handle
7059 with memset. */
7060 if (TREE_STATIC (exp)
7061 && !TREE_ADDRESSABLE (exp)
7062 && target != 0 && mode == BLKmode
7063 && all_zeros_p (exp))
7064 {
7065 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7066 return target;
7067 }
7069 /* All elts simple constants => refer to a constant in memory. But
7070 if this is a non-BLKmode mode, let it store a field at a time
7071 since that should make a CONST_INT or CONST_DOUBLE when we
7072 fold. Likewise, if we have a target we can use, it is best to
7073 store directly into the target unless the type is large enough
7074 that memcpy will be used. If we are making an initializer and
7075 all operands are constant, put it in memory as well.
7077 FIXME: Avoid trying to fill vector constructors piece-meal.
7078 Output them with output_constant_def below unless we're sure
7079 they're zeros. This should go away when vector initializers
7080 are treated like VECTOR_CST instead of arrays. */
7081 if ((TREE_STATIC (exp)
7082 && ((mode == BLKmode
7083 && ! (target != 0 && safe_from_p (target, exp, 1)))
7084 || TREE_ADDRESSABLE (exp)
7085 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7086 && (! MOVE_BY_PIECES_P
7087 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7088 TYPE_ALIGN (type)))
7089 && ! mostly_zeros_p (exp))))
7090 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7091 && TREE_CONSTANT (exp)))
7092 {
7093 rtx constructor;
7095 if (avoid_temp_mem)
7096 return NULL_RTX;
7098 constructor = expand_expr_constant (exp, 1, modifier);
7100 if (modifier != EXPAND_CONST_ADDRESS
7101 && modifier != EXPAND_INITIALIZER
7102 && modifier != EXPAND_SUM)
7103 constructor = validize_mem (constructor);
7105 return constructor;
7106 }
7108 /* Handle calls that pass values in multiple non-contiguous
7109 locations. The Irix 6 ABI has examples of this. */
7110 if (target == 0 || ! safe_from_p (target, exp, 1)
7111 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7112 {
7113 if (avoid_temp_mem)
7114 return NULL_RTX;
7116 target
7117 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7118 | (TREE_READONLY (exp)
7119 * TYPE_QUAL_CONST))),
7120 0, TREE_ADDRESSABLE (exp), 1);
7121 }
7123 store_constructor (exp, target, 0, int_expr_size (exp));
7124 return target;
7125 }
7128 /* expand_expr: generate code for computing expression EXP.
7129 An rtx for the computed value is returned. The value is never null.
7130 In the case of a void EXP, const0_rtx is returned.
7132 The value may be stored in TARGET if TARGET is nonzero.
7133 TARGET is just a suggestion; callers must assume that
7134 the rtx returned may not be the same as TARGET.
7136 If TARGET is CONST0_RTX, it means that the value will be ignored.
7138 If TMODE is not VOIDmode, it suggests generating the
7139 result in mode TMODE. But this is done only when convenient.
7140 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7141 TMODE is just a suggestion; callers must assume that
7142 the rtx returned may not have mode TMODE.
7144 Note that TARGET may have neither TMODE nor MODE. In that case, it
7145 probably will not be used.
7147 If MODIFIER is EXPAND_SUM then when EXP is an addition
7148 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7149 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7150 products as above, or REG or MEM, or constant.
7151 Ordinarily in such cases we would output mul or add instructions
7152 and then return a pseudo reg containing the sum.
7154 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7155 it also marks a label as absolutely required (it can't be dead).
7156 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7157 This is used for outputting expressions used in initializers.
7159 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7160 with a constant address even if that address is not normally legitimate.
7161 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7163 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7164 a call parameter. Such targets require special care as we haven't yet
7165 marked TARGET so that it's safe from being trashed by libcalls. We
7166 don't want to use TARGET for anything but the final result;
7167 intermediate values must go elsewhere. Additionally, calls to
7168 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7170 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7171 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7172 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7173 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7177 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7178 enum expand_modifier modifier, rtx *alt_rtl)
7179 {
7180 rtx ret;
7182 /* Handle ERROR_MARK before anybody tries to access its type. */
7183 if (TREE_CODE (exp) == ERROR_MARK
7184 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7185 {
7186 ret = CONST0_RTX (tmode);
7187 return ret ? ret : const0_rtx;
7188 }
7190 /* If this is an expression of some kind and it has an associated line
7191 number, then emit the line number before expanding the expression.
7193 We need to save and restore the file and line information so that
7194 errors discovered during expansion are emitted with the right
7195 information. It would be better if the diagnostic routines
7196 used the file/line information embedded in the tree nodes rather
7197 than globals. */
7198 if (cfun && EXPR_HAS_LOCATION (exp))
7199 {
7200 location_t saved_location = input_location;
7201 location_t saved_curr_loc = get_curr_insn_source_location ();
7202 tree saved_block = get_curr_insn_block ();
7203 input_location = EXPR_LOCATION (exp);
7204 set_curr_insn_source_location (input_location);
7206 /* Record where the insns produced belong. */
7207 set_curr_insn_block (TREE_BLOCK (exp));
7209 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7211 input_location = saved_location;
7212 set_curr_insn_block (saved_block);
7213 set_curr_insn_source_location (saved_curr_loc);
7214 }
7215 else
7217 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7219 return ret;
7220 }
7223 rtx
7224 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7225 enum expand_modifier modifier)
7226 {
7227 rtx op0, op1, op2, temp;
7228 tree type;
7229 int unsignedp;
7230 enum machine_mode mode;
7231 enum tree_code code = ops->code;
7232 optab this_optab;
7233 rtx subtarget, original_target;
7234 int ignore;
7235 bool reduce_bit_field;
7236 location_t loc = ops->location;
7237 tree treeop0, treeop1, treeop2;
7238 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7239 ? reduce_to_bit_field_precision ((expr), \
7240 target, \
7241 type) \
7242 : (expr))
7244 type = ops->type;
7245 mode = TYPE_MODE (type);
7246 unsignedp = TYPE_UNSIGNED (type);
7248 treeop0 = ops->op0;
7249 treeop1 = ops->op1;
7250 treeop2 = ops->op2;
7252 /* We should be called only on simple (binary or unary) expressions,
7253 exactly those that are valid in gimple expressions that aren't
7254 GIMPLE_SINGLE_RHS (or invalid). */
7255 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7256 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7257 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7259 ignore = (target == const0_rtx
7260 || ((CONVERT_EXPR_CODE_P (code)
7261 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7262 && TREE_CODE (type) == VOID_TYPE));
7264 /* We should be called only if we need the result. */
7265 gcc_assert (!ignore);
7267 /* An operation in what may be a bit-field type needs the
7268 result to be reduced to the precision of the bit-field type,
7269 which is narrower than that of the type's mode. */
7270 reduce_bit_field = (INTEGRAL_TYPE_P (type)
7271 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
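/* Example of the reduction (hypothetical type): a 3-bit unsigned
   bit-field type whose mode is QImode has mode precision 8 >
   TYPE_PRECISION 3, so REDUCE_BIT_FIELD masks results back into [0, 7]
   after arithmetic; otherwise the excess mode bits would leak into
   later users of the value.  */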
7273 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7274 target = 0;
7276 /* Use subtarget as the target for operand 0 of a binary operation. */
7277 subtarget = get_subtarget (target);
7278 original_target = target;
7280 switch (code)
7281 {
7282 case NON_LVALUE_EXPR:
7283 case PAREN_EXPR:
7284 CASE_CONVERT:
7285 if (treeop0 == error_mark_node)
7286 return const0_rtx;
7288 if (TREE_CODE (type) == UNION_TYPE)
7289 {
7290 tree valtype = TREE_TYPE (treeop0);
7292 /* If both input and output are BLKmode, this conversion isn't doing
7293 anything except possibly changing memory attribute. */
7294 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7295 {
7296 rtx result = expand_expr (treeop0, target, tmode,
7297 modifier);
7299 result = copy_rtx (result);
7300 set_mem_attributes (result, type, 0);
7301 return result;
7302 }
7304 if (target == 0)
7305 {
7306 if (TYPE_MODE (type) != BLKmode)
7307 target = gen_reg_rtx (TYPE_MODE (type));
7308 else
7309 target = assign_temp (type, 0, 1, 1);
7310 }
7312 if (MEM_P (target))
7313 /* Store data into beginning of memory target. */
7314 store_expr (treeop0,
7315 adjust_address (target, TYPE_MODE (valtype), 0),
7316 modifier == EXPAND_STACK_PARM,
7317 false);
7319 else
7320 {
7321 gcc_assert (REG_P (target));
7323 /* Store this field into a union of the proper type. */
7324 store_field (target,
7325 MIN ((int_size_in_bytes (TREE_TYPE
7326 (treeop0))
7327 * BITS_PER_UNIT),
7328 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7329 0, TYPE_MODE (valtype), treeop0,
7330 type, 0, false);
7331 }
7333 /* Return the entire union. */
7334 return target;
7335 }
7337 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7338 {
7339 op0 = expand_expr (treeop0, target, VOIDmode,
7340 modifier);
7342 /* If the signedness of the conversion differs and OP0 is
7343 a promoted SUBREG, clear that indication since we now
7344 have to do the proper extension. */
7345 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7346 && GET_CODE (op0) == SUBREG)
7347 SUBREG_PROMOTED_VAR_P (op0) = 0;
7349 return REDUCE_BIT_FIELD (op0);
7350 }
7352 op0 = expand_expr (treeop0, NULL_RTX, mode,
7353 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7354 if (GET_MODE (op0) == mode)
7355 return op0;
7357 /* If OP0 is a constant, just convert it into the proper mode. */
7358 else if (CONSTANT_P (op0))
7360 tree inner_type = TREE_TYPE (treeop0);
7361 enum machine_mode inner_mode = GET_MODE (op0);
7363 if (inner_mode == VOIDmode)
7364 inner_mode = TYPE_MODE (inner_type);
7366 if (modifier == EXPAND_INITIALIZER)
7367 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7368 subreg_lowpart_offset (mode,
7369 inner_mode));
7370 else
7371 op0 = convert_modes (mode, inner_mode, op0,
7372 TYPE_UNSIGNED (inner_type));
7373 }
7375 else if (modifier == EXPAND_INITIALIZER)
7376 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7378 else if (target == 0)
7379 op0 = convert_to_mode (mode, op0,
7380 TYPE_UNSIGNED (TREE_TYPE
7381 (treeop0)));
7382 else
7383 {
7384 convert_move (target, op0,
7385 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7386 op0 = target;
7387 }
7389 return REDUCE_BIT_FIELD (op0);
7391 case ADDR_SPACE_CONVERT_EXPR:
7392 {
7393 tree treeop0_type = TREE_TYPE (treeop0);
7394 addr_space_t as_to;
7395 addr_space_t as_from;
7397 gcc_assert (POINTER_TYPE_P (type));
7398 gcc_assert (POINTER_TYPE_P (treeop0_type));
7400 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7401 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7403 /* Conversions between pointers to the same address space should
7404 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7405 gcc_assert (as_to != as_from);
7407 /* Ask target code to handle conversion between pointers
7408 to overlapping address spaces. */
7409 if (targetm.addr_space.subset_p (as_to, as_from)
7410 || targetm.addr_space.subset_p (as_from, as_to))
7412 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7413 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7414 gcc_assert (op0);
7415 return op0;
7416 }
7418 /* For disjoint address spaces, converting anything but
7419 a null pointer invokes undefined behaviour. We simply
7420 always return a null pointer here. */
7421 return CONST0_RTX (mode);
7422 }
7424 case POINTER_PLUS_EXPR:
7425 /* Even though the sizetype mode and the pointer's mode can be different,
7426 expand is able to handle this correctly and get the correct result out
7427 of the PLUS_EXPR code. */
7428 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7429 if sizetype precision is smaller than pointer precision. */
7430 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7431 treeop1 = fold_convert_loc (loc, type,
7432 fold_convert_loc (loc, ssizetype,
7433 treeop1));
7435 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7436 something else, make sure we add the register to the constant and
7437 then to the other thing. This case can occur during strength
7438 reduction and doing it this way will produce better code if the
7439 frame pointer or argument pointer is eliminated.
7441 fold-const.c will ensure that the constant is always in the inner
7442 PLUS_EXPR, so the only case we need to do anything about is if
7443 sp, ap, or fp is our second argument, in which case we must swap
7444 the innermost first argument and our second argument. */
7446 if (TREE_CODE (treeop0) == PLUS_EXPR
7447 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7448 && TREE_CODE (treeop1) == VAR_DECL
7449 && (DECL_RTL (treeop1) == frame_pointer_rtx
7450 || DECL_RTL (treeop1) == stack_pointer_rtx
7451 || DECL_RTL (treeop1) == arg_pointer_rtx))
7452 {
7453 tree t = treeop1;
7455 treeop1 = TREE_OPERAND (treeop0, 0);
7456 TREE_OPERAND (treeop0, 0) = t;
7457 }
7459 /* If the result is to be ptr_mode and we are adding an integer to
7460 something, we might be forming a constant. So try to use
7461 plus_constant. If it produces a sum and we can't accept it,
7462 use force_operand. This allows P = &ARR[const] to generate
7463 efficient code on machines where a SYMBOL_REF is not a valid
7464 address.
7466 If this is an EXPAND_SUM call, always return the sum. */
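/* Concrete case this enables (illustrative): P = &ARR[3] with 4-byte
   elements amounts to "&ARR + 12"; plus_constant can fold that into a
   single symbolic constant, and force_operand is only needed when such
   a sum is not itself a legitimate address on the target.  */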
7467 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7468 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7470 if (modifier == EXPAND_STACK_PARM)
7471 target = 0;
7472 if (TREE_CODE (treeop0) == INTEGER_CST
7473 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7474 && TREE_CONSTANT (treeop1))
7475 {
7476 rtx constant_part;
7478 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7479 EXPAND_SUM);
7480 /* Use immed_double_const to ensure that the constant is
7481 truncated according to the mode of OP1, then sign extended
7482 to a HOST_WIDE_INT. Using the constant directly can result
7483 in non-canonical RTL in a 64x32 cross compile. */
7484 constant_part
7485 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7486 (HOST_WIDE_INT) 0,
7487 TYPE_MODE (TREE_TYPE (treeop1)));
7488 op1 = plus_constant (op1, INTVAL (constant_part));
7489 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7490 op1 = force_operand (op1, target);
7491 return REDUCE_BIT_FIELD (op1);
7492 }
7494 else if (TREE_CODE (treeop1) == INTEGER_CST
7495 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7496 && TREE_CONSTANT (treeop0))
7497 {
7498 rtx constant_part;
7500 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7501 (modifier == EXPAND_INITIALIZER
7502 ? EXPAND_INITIALIZER : EXPAND_SUM));
7503 if (! CONSTANT_P (op0))
7504 {
7505 op1 = expand_expr (treeop1, NULL_RTX,
7506 VOIDmode, modifier);
7507 /* Return a PLUS if modifier says it's OK. */
7508 if (modifier == EXPAND_SUM
7509 || modifier == EXPAND_INITIALIZER)
7510 return simplify_gen_binary (PLUS, mode, op0, op1);
7511 goto binop2;
7512 }
7513 /* Use immed_double_const to ensure that the constant is
7514 truncated according to the mode of OP1, then sign extended
7515 to a HOST_WIDE_INT. Using the constant directly can result
7516 in non-canonical RTL in a 64x32 cross compile. */
7517 constant_part
7518 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7519 (HOST_WIDE_INT) 0,
7520 TYPE_MODE (TREE_TYPE (treeop0)));
7521 op0 = plus_constant (op0, INTVAL (constant_part));
7522 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7523 op0 = force_operand (op0, target);
7524 return REDUCE_BIT_FIELD (op0);
7525 }
7526 }
7528 /* Use TER to expand pointer addition of a negated value
7529 as pointer subtraction. */
7530 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
7531 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
7532 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
7533 && TREE_CODE (treeop1) == SSA_NAME
7534 && TYPE_MODE (TREE_TYPE (treeop0))
7535 == TYPE_MODE (TREE_TYPE (treeop1)))
7536 {
7537 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
7538 if (def)
7539 {
7540 treeop1 = gimple_assign_rhs1 (def);
7541 code = MINUS_EXPR;
7542 goto do_minus;
7543 }
7544 }
7546 /* No sense saving up arithmetic to be done
7547 if it's all in the wrong mode to form part of an address.
7548 And force_operand won't know whether to sign-extend or
7549 zero-extend. */
7550 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7551 || mode != ptr_mode)
7552 {
7553 expand_operands (treeop0, treeop1,
7554 subtarget, &op0, &op1, EXPAND_NORMAL);
7555 if (op0 == const0_rtx)
7556 return op1;
7557 if (op1 == const0_rtx)
7558 return op0;
7559 goto binop2;
7560 }
7562 expand_operands (treeop0, treeop1,
7563 subtarget, &op0, &op1, modifier);
7564 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7566 case MINUS_EXPR:
7567 do_minus:
7568 /* For initializers, we are allowed to return a MINUS of two
7569 symbolic constants. Here we handle all cases when both operands
7570 are constant. */
7571 /* Handle difference of two symbolic constants,
7572 for the sake of an initializer. */
7572 for the sake of an initializer. */
7573 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7574 && really_constant_p (treeop0)
7575 && really_constant_p (treeop1))
7576 {
7577 expand_operands (treeop0, treeop1,
7578 NULL_RTX, &op0, &op1, modifier);
7580 /* If the last operand is a CONST_INT, use plus_constant of
7581 the negated constant. Else make the MINUS. */
7582 if (CONST_INT_P (op1))
7583 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7585 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7586 }
7588 /* No sense saving up arithmetic to be done
7589 if it's all in the wrong mode to form part of an address.
7590 And force_operand won't know whether to sign-extend or
7591 zero-extend. */
7592 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7593 || mode != ptr_mode)
7594 goto binop;
7596 expand_operands (treeop0, treeop1,
7597 subtarget, &op0, &op1, modifier);
7599 /* Convert A - const to A + (-const). */
7600 if (CONST_INT_P (op1))
7601 {
7602 op1 = negate_rtx (mode, op1);
7603 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7604 }
7606 goto binop2;
7608 case WIDEN_MULT_PLUS_EXPR:
7609 case WIDEN_MULT_MINUS_EXPR:
7610 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7611 op2 = expand_normal (treeop2);
7612 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7613 target, unsignedp);
7614 return target;
7616 case WIDEN_MULT_EXPR:
7617 /* If first operand is constant, swap them.
7618 Thus the following special case checks need only
7619 check the second operand. */
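/* E.g. (long long) a * (long long) b with 32-bit ints and a 64-bit
   result: innermode is SImode and mode == GET_MODE_2XWIDER_MODE (SImode),
   so the checks below can select [us]mul_widen_optab and emit one
   widening multiply instead of extending both operands and doing a full
   DImode multiplication.  */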
7620 if (TREE_CODE (treeop0) == INTEGER_CST)
7621 {
7622 tree t1 = treeop0;
7623 treeop0 = treeop1;
7624 treeop1 = t1;
7625 }
7627 /* First, check if we have a multiplication of one signed and one
7628 unsigned operand. */
7629 if (TREE_CODE (treeop1) != INTEGER_CST
7630 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7631 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7633 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7634 this_optab = usmul_widen_optab;
7635 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7636 {
7637 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7638 {
7639 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7640 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7641 EXPAND_NORMAL);
7642 else
7643 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
7644 EXPAND_NORMAL);
7645 goto binop3;
7646 }
7647 }
7648 }
7649 /* Check for a multiplication with matching signedness. */
7650 else if ((TREE_CODE (treeop1) == INTEGER_CST
7651 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7652 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7653 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7655 tree op0type = TREE_TYPE (treeop0);
7656 enum machine_mode innermode = TYPE_MODE (op0type);
7657 bool zextend_p = TYPE_UNSIGNED (op0type);
7658 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7659 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7661 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7662 && TREE_CODE (treeop0) != INTEGER_CST)
7663 {
7664 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7665 {
7666 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7668 temp = expand_widening_mult (mode, op0, op1, target,
7669 unsignedp, this_optab);
7670 return REDUCE_BIT_FIELD (temp);
7671 }
7672 if (optab_handler (other_optab, mode) != CODE_FOR_nothing
7673 && innermode == word_mode)
7674 {
7675 rtx htem, hipart;
7676 op0 = expand_normal (treeop0);
7677 if (TREE_CODE (treeop1) == INTEGER_CST)
7678 op1 = convert_modes (innermode, mode,
7679 expand_normal (treeop1), unsignedp);
7680 else
7681 op1 = expand_normal (treeop1);
7682 temp = expand_binop (mode, other_optab, op0, op1, target,
7683 unsignedp, OPTAB_LIB_WIDEN);
7684 hipart = gen_highpart (innermode, temp);
7685 htem = expand_mult_highpart_adjust (innermode, hipart,
7686 op0, op1, hipart,
7687 zextend_p);
7688 if (htem != hipart)
7689 emit_move_insn (hipart, htem);
7690 return REDUCE_BIT_FIELD (temp);
7691 }
7692 }
7694 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7695 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7696 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7697 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7699 case FMA_EXPR:
7700 {
7701 optab opt = fma_optab;
7702 gimple def0, def2;
7704 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
7705 calls. */
7706 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
7707 {
7708 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
7709 tree call_expr;
7711 gcc_assert (fn != NULL_TREE);
7712 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
7713 return expand_builtin (call_expr, target, subtarget, mode, false);
7714 }
7716 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
7717 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
7718 op0 = op2 = NULL;
7720 if (def0 && def2
7722 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
7723 {
7724 opt = fnms_optab;
7725 op0 = expand_normal (gimple_assign_rhs1 (def0));
7726 op2 = expand_normal (gimple_assign_rhs1 (def2));
7727 }
7728 else if (def0
7729 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
7730 {
7731 opt = fnma_optab;
7732 op0 = expand_normal (gimple_assign_rhs1 (def0));
7733 }
7734 else if (def2
7735 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
7736 {
7737 opt = fms_optab;
7738 op2 = expand_normal (gimple_assign_rhs1 (def2));
7739 }
7741 if (op0 == NULL)
7742 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
7743 if (op2 == NULL)
7744 op2 = expand_normal (treeop2);
7745 op1 = expand_normal (treeop1);
7747 return expand_ternary_op (TYPE_MODE (type), opt,
7748 op0, op1, op2, target, 0);
7749 }
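/* Selection sketch (illustrative): for t = fma (-x, y, z), TER gives
   treeop0 a NEGATE_EXPR definition, so def0 is set above and, when the
   target provides an fnma pattern, opt becomes fnma_optab and the whole
   computation z - x*y is emitted as a single instruction.  */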
7751 case MULT_EXPR:
7752 /* If this is a fixed-point operation, then we cannot use the code
7753 below because "expand_mult" doesn't support sat/no-sat fixed-point
7754 multiplications. */
7755 if (ALL_FIXED_POINT_MODE_P (mode))
7756 goto binop;
7758 /* If first operand is constant, swap them.
7759 Thus the following special case checks need only
7760 check the second operand. */
7761 if (TREE_CODE (treeop0) == INTEGER_CST)
7762 {
7763 tree t1 = treeop0;
7764 treeop0 = treeop1;
7765 treeop1 = t1;
7766 }
7768 /* Attempt to return something suitable for generating an
7769 indexed address, for machines that support that. */
7771 if (modifier == EXPAND_SUM && mode == ptr_mode
7772 && host_integerp (treeop1, 0))
7773 {
7774 tree exp1 = treeop1;
7776 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7777 EXPAND_SUM);
7779 if (!REG_P (op0))
7780 op0 = force_operand (op0, NULL_RTX);
7781 if (!REG_P (op0))
7782 op0 = copy_to_mode_reg (mode, op0);
7784 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7785 gen_int_mode (tree_low_cst (exp1, 0),
7786 TYPE_MODE (TREE_TYPE (exp1)))));
7787 }
7789 if (modifier == EXPAND_STACK_PARM)
7790 target = 0;
7792 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7793 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7795 case TRUNC_DIV_EXPR:
7796 case FLOOR_DIV_EXPR:
7797 case CEIL_DIV_EXPR:
7798 case ROUND_DIV_EXPR:
7799 case EXACT_DIV_EXPR:
7800 /* If this is a fixed-point operation, then we cannot use the code
7801 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7802 divisions. */
7803 if (ALL_FIXED_POINT_MODE_P (mode))
7804 goto binop;
7806 if (modifier == EXPAND_STACK_PARM)
7807 target = 0;
7808 /* Possible optimization: compute the dividend with EXPAND_SUM
7809 then if the divisor is constant can optimize the case
7810 where some terms of the dividend have coeffs divisible by it. */
7811 expand_operands (treeop0, treeop1,
7812 subtarget, &op0, &op1, EXPAND_NORMAL);
7813 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7815 case RDIV_EXPR:
7816 goto binop;
7818 case TRUNC_MOD_EXPR:
7819 case FLOOR_MOD_EXPR:
7820 case CEIL_MOD_EXPR:
7821 case ROUND_MOD_EXPR:
7822 if (modifier == EXPAND_STACK_PARM)
7823 target = 0;
7824 expand_operands (treeop0, treeop1,
7825 subtarget, &op0, &op1, EXPAND_NORMAL);
7826 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7828 case FIXED_CONVERT_EXPR:
7829 op0 = expand_normal (treeop0);
7830 if (target == 0 || modifier == EXPAND_STACK_PARM)
7831 target = gen_reg_rtx (mode);
7833 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7834 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7835 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7836 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7837 else
7838 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7839 return target;
7841 case FIX_TRUNC_EXPR:
7842 op0 = expand_normal (treeop0);
7843 if (target == 0 || modifier == EXPAND_STACK_PARM)
7844 target = gen_reg_rtx (mode);
7845 expand_fix (target, op0, unsignedp);
7846 return target;
7848 case FLOAT_EXPR:
7849 op0 = expand_normal (treeop0);
7850 if (target == 0 || modifier == EXPAND_STACK_PARM)
7851 target = gen_reg_rtx (mode);
7852 /* expand_float can't figure out what to do if FROM has VOIDmode.
7853 So give it the correct mode. With -O, cse will optimize this. */
7854 if (GET_MODE (op0) == VOIDmode)
7855 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7856 op0);
7857 expand_float (target, op0,
7858 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7859 return target;
7861 case NEGATE_EXPR:
7862 op0 = expand_expr (treeop0, subtarget,
7863 VOIDmode, EXPAND_NORMAL);
7864 if (modifier == EXPAND_STACK_PARM)
7865 target = 0;
7866 temp = expand_unop (mode,
7867 optab_for_tree_code (NEGATE_EXPR, type,
7868 optab_default),
7869 op0, target, 0);
7870 gcc_assert (temp);
7871 return REDUCE_BIT_FIELD (temp);
7873 case ABS_EXPR:
7874 op0 = expand_expr (treeop0, subtarget,
7875 VOIDmode, EXPAND_NORMAL);
7876 if (modifier == EXPAND_STACK_PARM)
7877 target = 0;
7879 /* ABS_EXPR is not valid for complex arguments. */
7880 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7881 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7883 /* Unsigned abs is simply the operand. Testing here means we don't
7884 risk generating incorrect code below. */
7885 if (TYPE_UNSIGNED (type))
7886 return op0;
7888 return expand_abs (mode, op0, target, unsignedp,
7889 safe_from_p (target, treeop0, 1));
7891 case MAX_EXPR:
7892 case MIN_EXPR:
7893 target = original_target;
7894 if (target == 0
7895 || modifier == EXPAND_STACK_PARM
7896 || (MEM_P (target) && MEM_VOLATILE_P (target))
7897 || GET_MODE (target) != mode
7898 || (REG_P (target)
7899 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7900 target = gen_reg_rtx (mode);
7901 expand_operands (treeop0, treeop1,
7902 target, &op0, &op1, EXPAND_NORMAL);
7904 /* First try to do it with a special MIN or MAX instruction.
7905 If that does not win, use a conditional jump to select the proper
7906 value. */
7907 this_optab = optab_for_tree_code (code, type, optab_default);
7908 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7909 OPTAB_WIDEN);
7910 if (temp != 0)
7911 return temp;
7913 /* At this point, a MEM target is no longer useful; we will get better
7914 code without it. */
7916 if (! REG_P (target))
7917 target = gen_reg_rtx (mode);
7919 /* If op1 was placed in target, swap op0 and op1. */
7920 if (target != op0 && target == op1)
7921 {
7922 temp = op0;
7923 op0 = op1;
7924 op1 = temp;
7925 }
7927 /* We generate better code and avoid problems with op1 mentioning
7928 target by forcing op1 into a pseudo if it isn't a constant. */
7929 if (! CONSTANT_P (op1))
7930 op1 = force_reg (mode, op1);
7932 {
7933 enum rtx_code comparison_code;
7934 rtx cmpop1 = op1;
7936 if (code == MAX_EXPR)
7937 comparison_code = unsignedp ? GEU : GE;
7938 else
7939 comparison_code = unsignedp ? LEU : LE;
7941 /* Canonicalize to comparisons against 0. */
7942 if (op1 == const1_rtx)
7943 {
7944 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7945 or (a != 0 ? a : 1) for unsigned.
7946 For MIN we are safe converting (a <= 1 ? a : 1)
7947 into (a <= 0 ? a : 1) */
7948 cmpop1 = const0_rtx;
7949 if (code == MAX_EXPR)
7950 comparison_code = unsignedp ? NE : GT;
7951 }
7952 if (op1 == constm1_rtx && !unsignedp)
7953 {
7954 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7955 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7956 cmpop1 = const0_rtx;
7957 if (code == MIN_EXPR)
7958 comparison_code = LT;
7959 }
7960 #ifdef HAVE_conditional_move
7961 /* Use a conditional move if possible. */
7962 if (can_conditionally_move_p (mode))
7963 {
7964 rtx insn;
7966 /* ??? Same problem as in expmed.c: emit_conditional_move
7967 forces a stack adjustment via compare_from_rtx, and we
7968 lose the stack adjustment if the sequence we are about
7969 to create is discarded. */
7970 do_pending_stack_adjust ();
7972 start_sequence ();
7974 /* Try to emit the conditional move. */
7975 insn = emit_conditional_move (target, comparison_code,
7976 op0, cmpop1, mode,
7977 op0, op1, mode,
7978 unsignedp);
7980 /* If we could do the conditional move, emit the sequence,
7981 and return. */
7982 if (insn)
7983 {
7984 rtx seq = get_insns ();
7985 end_sequence ();
7986 emit_insn (seq);
7987 return target;
7988 }
7990 /* Otherwise discard the sequence and fall back to code with
7991 branches. */
7992 end_sequence ();
7993 }
7994 #endif
7995 if (target != op0)
7996 emit_move_insn (target, op0);
7998 temp = gen_label_rtx ();
7999 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8000 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8001 -1);
8002 if (target != op1)
8003 emit_move_insn (target, op1);
8004 }
8005 emit_label (temp);
8006 return target;
8007 case BIT_NOT_EXPR:
8008 op0 = expand_expr (treeop0, subtarget,
8009 VOIDmode, EXPAND_NORMAL);
8010 if (modifier == EXPAND_STACK_PARM)
8011 target = 0;
8012 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8013 gcc_assert (temp);
8014 return temp;
8016 /* ??? Can optimize bitwise operations with one arg constant.
8017 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8018 and (a bitwise1 b) bitwise2 b (etc)
8019 but that is probably not worth while. */
8021 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8022 boolean values when we want in all cases to compute both of them. In
8023 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8024 as actual zero-or-1 values and then bitwise anding. In cases where
8025 there cannot be any side effects, better code would be made by
8026 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8027 how to recognize those cases. */
8029 case TRUTH_AND_EXPR:
8030 code = BIT_AND_EXPR;
8031 case BIT_AND_EXPR:
8032 goto binop;
8034 case TRUTH_OR_EXPR:
8035 code = BIT_IOR_EXPR;
8036 case BIT_IOR_EXPR:
8037 goto binop;
8039 case TRUTH_XOR_EXPR:
8040 code = BIT_XOR_EXPR;
8041 case BIT_XOR_EXPR:
8042 goto binop;
8044 case LROTATE_EXPR:
8045 case RROTATE_EXPR:
8046 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8047 || (GET_MODE_PRECISION (TYPE_MODE (type))
8048 == TYPE_PRECISION (type)));
8049 /* fall through */
8051 case LSHIFT_EXPR:
8052 case RSHIFT_EXPR:
8053 /* If this is a fixed-point operation, then we cannot use the code
8054 below because "expand_shift" doesn't support sat/no-sat fixed-point
8055 shifts. */
8056 if (ALL_FIXED_POINT_MODE_P (mode))
8057 goto binop;
8059 if (! safe_from_p (subtarget, treeop1, 1))
8060 subtarget = 0;
8061 if (modifier == EXPAND_STACK_PARM)
8062 target = 0;
8063 op0 = expand_expr (treeop0, subtarget,
8064 VOIDmode, EXPAND_NORMAL);
8065 temp = expand_variable_shift (code, mode, op0, treeop1, target,
8066 unsignedp);
8067 if (code == LSHIFT_EXPR)
8068 temp = REDUCE_BIT_FIELD (temp);
8069 return temp;
8071 /* Could determine the answer when only additive constants differ. Also,
8072 the addition of one can be handled by changing the condition. */
8073 case LT_EXPR:
8074 case LE_EXPR:
8075 case GT_EXPR:
8076 case GE_EXPR:
8077 case EQ_EXPR:
8078 case NE_EXPR:
8079 case UNORDERED_EXPR:
8080 case ORDERED_EXPR:
8081 case UNLT_EXPR:
8082 case UNLE_EXPR:
8083 case UNGT_EXPR:
8084 case UNGE_EXPR:
8085 case UNEQ_EXPR:
8086 case LTGT_EXPR:
8087 temp = do_store_flag (ops,
8088 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8089 tmode != VOIDmode ? tmode : mode);
8090 if (temp)
8091 return temp;
8093 /* Use a compare and a jump for BLKmode comparisons, or for function
8094 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8096 if ((target == 0
8097 || modifier == EXPAND_STACK_PARM
8098 || ! safe_from_p (target, treeop0, 1)
8099 || ! safe_from_p (target, treeop1, 1)
8100 /* Make sure we don't have a hard reg (such as function's return
8101 value) live across basic blocks, if not optimizing. */
8102 || (!optimize && REG_P (target)
8103 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8104 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8106 emit_move_insn (target, const0_rtx);
8108 op1 = gen_label_rtx ();
8109 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8111 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8112 emit_move_insn (target, constm1_rtx);
8113 else
8114 emit_move_insn (target, const1_rtx);
8116 emit_label (op1);
8117 return target;
8119 case TRUTH_NOT_EXPR:
8120 if (modifier == EXPAND_STACK_PARM)
8121 target = 0;
8122 op0 = expand_expr (treeop0, target,
8123 VOIDmode, EXPAND_NORMAL);
8124 /* The parser is careful to generate TRUTH_NOT_EXPR
8125 only with operands that are always zero or one. */
8126 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8127 target, 1, OPTAB_LIB_WIDEN);
8128 gcc_assert (temp);
8129 return temp;
8131 case COMPLEX_EXPR:
8132 /* Get the rtx code of the operands. */
8133 op0 = expand_normal (treeop0);
8134 op1 = expand_normal (treeop1);
8136 if (!target)
8137 target = gen_reg_rtx (TYPE_MODE (type));
8139 /* Move the real (op0) and imaginary (op1) parts to their location. */
8140 write_complex_part (target, op0, false);
8141 write_complex_part (target, op1, true);
8142 return target;
8145 case WIDEN_SUM_EXPR:
8147 tree oprnd0 = treeop0;
8148 tree oprnd1 = treeop1;
8150 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8151 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8156 case REDUC_MAX_EXPR:
8157 case REDUC_MIN_EXPR:
8158 case REDUC_PLUS_EXPR:
8160 op0 = expand_normal (treeop0);
8161 this_optab = optab_for_tree_code (code, type, optab_default);
8162 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8167 case VEC_EXTRACT_EVEN_EXPR:
8168 case VEC_EXTRACT_ODD_EXPR:
8170 expand_operands (treeop0, treeop1,
8171 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8172 this_optab = optab_for_tree_code (code, type, optab_default);
8173 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8179 case VEC_INTERLEAVE_HIGH_EXPR:
8180 case VEC_INTERLEAVE_LOW_EXPR:
8182 expand_operands (treeop0, treeop1,
8183 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8184 this_optab = optab_for_tree_code (code, type, optab_default);
8185 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8191 case VEC_LSHIFT_EXPR:
8192 case VEC_RSHIFT_EXPR:
8194 target = expand_vec_shift_expr (ops, target);
8198 case VEC_UNPACK_HI_EXPR:
8199 case VEC_UNPACK_LO_EXPR:
8201 op0 = expand_normal (treeop0);
8202 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8208 case VEC_UNPACK_FLOAT_HI_EXPR:
8209 case VEC_UNPACK_FLOAT_LO_EXPR:
8211 op0 = expand_normal (treeop0);
8212 /* The signedness is determined from input operand. */
8213 temp = expand_widen_pattern_expr
8214 (ops, op0, NULL_RTX, NULL_RTX,
8215 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8221 case VEC_WIDEN_MULT_HI_EXPR:
8222 case VEC_WIDEN_MULT_LO_EXPR:
8224 tree oprnd0 = treeop0;
8225 tree oprnd1 = treeop1;
8227 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8228 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8230 gcc_assert (target);
8234 case VEC_PACK_TRUNC_EXPR:
8235 case VEC_PACK_SAT_EXPR:
8236 case VEC_PACK_FIX_TRUNC_EXPR:
8237 mode = TYPE_MODE (TREE_TYPE (treeop0));
8242 tree oprnd0 = treeop0;
8243 tree oprnd1 = treeop1;
8244 tree oprnd2 = treeop2;
8247 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8248 op2 = expand_normal (oprnd2);
8249 target = expand_widen_pattern_expr (ops, op0, op1, op2,
8254 case REALIGN_LOAD_EXPR:
8256 tree oprnd0 = treeop0;
8257 tree oprnd1 = treeop1;
8258 tree oprnd2 = treeop2;
8261 this_optab = optab_for_tree_code (code, type, optab_default);
8262 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8263 op2 = expand_normal (oprnd2);
8264 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8274 /* Here to do an ordinary binary operator. */
8275 binop:
8276 expand_operands (treeop0, treeop1,
8277 subtarget, &op0, &op1, EXPAND_NORMAL);
8279 this_optab = optab_for_tree_code (code, type, optab_default);
8281 if (modifier == EXPAND_STACK_PARM)
8283 temp = expand_binop (mode, this_optab, op0, op1, target,
8284 unsignedp, OPTAB_LIB_WIDEN);
8286 return REDUCE_BIT_FIELD (temp);
8288 #undef REDUCE_BIT_FIELD
8291 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8292 enum expand_modifier modifier, rtx *alt_rtl)
8294 rtx op0, op1, temp, decl_rtl;
8297 enum machine_mode mode;
8298 enum tree_code code = TREE_CODE (exp);
8299 rtx subtarget, original_target;
8302 bool reduce_bit_field;
8303 location_t loc = EXPR_LOCATION (exp);
8304 struct separate_ops ops;
8305 tree treeop0, treeop1, treeop2;
8306 tree ssa_name = NULL_TREE;
8309 type = TREE_TYPE (exp);
8310 mode = TYPE_MODE (type);
8311 unsignedp = TYPE_UNSIGNED (type);
8313 treeop0 = treeop1 = treeop2 = NULL_TREE;
8314 if (!VL_EXP_CLASS_P (exp))
8315 switch (TREE_CODE_LENGTH (code))
8318 case 3: treeop2 = TREE_OPERAND (exp, 2);
8319 case 2: treeop1 = TREE_OPERAND (exp, 1);
8320 case 1: treeop0 = TREE_OPERAND (exp, 0);
8330 ignore = (target == const0_rtx
8331 || ((CONVERT_EXPR_CODE_P (code)
8332 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8333 && TREE_CODE (type) == VOID_TYPE));
8335 /* An operation in what may be a bit-field type needs the
8336 result to be reduced to the precision of the bit-field type,
8337 which is narrower than that of the type's mode. */
8338 reduce_bit_field = (!ignore
8339 && INTEGRAL_TYPE_P (type)
8340 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8342 /* If we are going to ignore this result, we need only do something
8343 if there is a side-effect somewhere in the expression. If there
8344 is, short-circuit the most common cases here. Note that we must
8345 not call expand_expr with anything but const0_rtx in case this
8346 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8350 if (! TREE_SIDE_EFFECTS (exp))
8353 /* Ensure we reference a volatile object even if value is ignored, but
8354 don't do this if all we are doing is taking its address. */
8355 if (TREE_THIS_VOLATILE (exp)
8356 && TREE_CODE (exp) != FUNCTION_DECL
8357 && mode != VOIDmode && mode != BLKmode
8358 && modifier != EXPAND_CONST_ADDRESS)
8360 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8366 if (TREE_CODE_CLASS (code) == tcc_unary
8367 || code == COMPONENT_REF || code == INDIRECT_REF)
8368 return expand_expr (treeop0, const0_rtx, VOIDmode,
8371 else if (TREE_CODE_CLASS (code) == tcc_binary
8372 || TREE_CODE_CLASS (code) == tcc_comparison
8373 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8375 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8376 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8379 else if (code == BIT_FIELD_REF)
8381 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8382 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8383 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8390 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8393 /* Use subtarget as the target for operand 0 of a binary operation. */
8394 subtarget = get_subtarget (target);
8395 original_target = target;
8401 tree function = decl_function_context (exp);
8403 temp = label_rtx (exp);
8404 temp = gen_rtx_LABEL_REF (Pmode, temp);
8406 if (function != current_function_decl
8408 LABEL_REF_NONLOCAL_P (temp) = 1;
8410 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8415 /* ??? ivopts calls expander, without any preparation from
8416 out-of-ssa. So fake instructions as if this were an access to the
8417 base variable. This unnecessarily allocates a pseudo; see how we can
8418 reuse it, if partition base vars have it set already. */
8419 if (!currently_expanding_to_rtl)
8420 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8423 g = get_gimple_for_ssa_name (exp);
8424 /* For EXPAND_INITIALIZER try harder to get something simpler. */
8426 && modifier == EXPAND_INITIALIZER
8427 && !SSA_NAME_IS_DEFAULT_DEF (exp)
8428 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
8429 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
8430 g = SSA_NAME_DEF_STMT (exp);
8432 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8436 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8437 exp = SSA_NAME_VAR (ssa_name);
8438 goto expand_decl_rtl;
8442 /* If a static var's type was incomplete when the decl was written,
8443 but the type is complete now, lay out the decl now. */
8444 if (DECL_SIZE (exp) == 0
8445 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8446 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8447 layout_decl (exp, 0);
8449 /* ... fall through ... */
8453 decl_rtl = DECL_RTL (exp);
8455 gcc_assert (decl_rtl);
8456 decl_rtl = copy_rtx (decl_rtl);
8457 /* Record writes to register variables. */
8458 if (modifier == EXPAND_WRITE
8460 && HARD_REGISTER_P (decl_rtl))
8461 add_to_hard_reg_set (&crtl->asm_clobbers,
8462 GET_MODE (decl_rtl), REGNO (decl_rtl));
8464 /* Ensure variable marked as used even if it doesn't go through
8465 a parser. If it hasn't been used yet, write out an external definition. */
8467 if (! TREE_USED (exp))
8469 assemble_external (exp);
8470 TREE_USED (exp) = 1;
8473 /* Show we haven't gotten RTL for this yet. */
8476 /* Variables inherited from containing functions should have
8477 been lowered by this point. */
8478 context = decl_function_context (exp);
8479 gcc_assert (!context
8480 || context == current_function_decl
8481 || TREE_STATIC (exp)
8482 || DECL_EXTERNAL (exp)
8483 /* ??? C++ creates functions that are not TREE_STATIC. */
8484 || TREE_CODE (exp) == FUNCTION_DECL);
8486 /* This is the case of an array whose size is to be determined
8487 from its initializer, while the initializer is still being parsed. See expand_decl. */
8490 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8491 temp = validize_mem (decl_rtl);
8493 /* If DECL_RTL is memory, we are in the normal case and the
8494 address is not valid, get the address into a register. */
8496 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8499 *alt_rtl = decl_rtl;
8500 decl_rtl = use_anchored_address (decl_rtl);
8501 if (modifier != EXPAND_CONST_ADDRESS
8502 && modifier != EXPAND_SUM
8503 && !memory_address_addr_space_p (DECL_MODE (exp),
8505 MEM_ADDR_SPACE (decl_rtl)))
8506 temp = replace_equiv_address (decl_rtl,
8507 copy_rtx (XEXP (decl_rtl, 0)));
8510 /* If we got something, return it. But first, set the alignment
8511 if the address is a register. */
8514 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8515 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8520 /* If the mode of DECL_RTL does not match that of the decl, it
8521 must be a promoted value. We return a SUBREG of the wanted mode,
8522 but mark it so that we know that it was already extended. */
8523 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8525 enum machine_mode pmode;
8527 /* Get the signedness to be used for this variable. Ensure we get
8528 the same mode we got when the variable was declared. */
8529 if (code == SSA_NAME
8530 && (g = SSA_NAME_DEF_STMT (ssa_name))
8531 && gimple_code (g) == GIMPLE_CALL)
8533 gcc_assert (!gimple_call_internal_p (g));
8534 pmode = promote_function_mode (type, mode, &unsignedp,
8535 gimple_call_fntype (g),
8539 pmode = promote_decl_mode (exp, &unsignedp);
8540 gcc_assert (GET_MODE (decl_rtl) == pmode);
8542 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8543 SUBREG_PROMOTED_VAR_P (temp) = 1;
8544 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
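/* Illustration (hypothetical target): on a machine that promotes
   QImode locals into word registers, a variable declared "signed char c"
   has DECL_MODE QImode but DECL_RTL in SImode.  Reading it here yields
   (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set, telling
   later passes that the high bits already hold a valid sign
   extension.  */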
8551 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8552 TREE_INT_CST_HIGH (exp), mode);
8558 tree tmp = NULL_TREE;
8559 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8560 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8561 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8562 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8563 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8564 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8565 return const_vector_from_tree (exp);
8566 if (GET_MODE_CLASS (mode) == MODE_INT)
8568 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8570 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8573 tmp = build_constructor_from_list (type,
8574 TREE_VECTOR_CST_ELTS (exp));
8575 return expand_expr (tmp, ignore ? const0_rtx : target,
8580 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8583 /* If optimized, generate immediate CONST_DOUBLE
8584 which will be turned into memory by reload if necessary.
8586 We used to force a register so that loop.c could see it. But
8587 this does not allow gen_* patterns to perform optimizations with
8588 the constants. It also produces two insns in cases like "x = 1.0;".
8589 On most machines, floating-point constants are not permitted in
8590 many insns, so we'd end up copying it to a register in any case.
8592 Now, we do the copying in expand_binop, if appropriate. */
8593 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8594 TYPE_MODE (TREE_TYPE (exp)));
8597 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8598 TYPE_MODE (TREE_TYPE (exp)));
8601 /* Handle evaluating a complex constant in a CONCAT target. */
8602 if (original_target && GET_CODE (original_target) == CONCAT)
8604 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8607 rtarg = XEXP (original_target, 0);
8608 itarg = XEXP (original_target, 1);
8610 /* Move the real and imaginary parts separately. */
8611 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8612 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8615 emit_move_insn (rtarg, op0);
8617 emit_move_insn (itarg, op1);
8619 return original_target;
8622 /* ... fall through ... */
8625 temp = expand_expr_constant (exp, 1, modifier);
8627 /* temp contains a constant address.
8628 On RISC machines where a constant address isn't valid,
8629 make some insns to get that address into a register. */
8630 if (modifier != EXPAND_CONST_ADDRESS
8631 && modifier != EXPAND_INITIALIZER
8632 && modifier != EXPAND_SUM
8633 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8634 MEM_ADDR_SPACE (temp)))
8635 return replace_equiv_address (temp,
8636 copy_rtx (XEXP (temp, 0)));
8642 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8644 if (!SAVE_EXPR_RESOLVED_P (exp))
8646 /* We can indeed still hit this case, typically via builtin
8647 expanders calling save_expr immediately before expanding
8648 something. Assume this means that we only have to deal
8649 with non-BLKmode values. */
8650 gcc_assert (GET_MODE (ret) != BLKmode);
8652 val = build_decl (EXPR_LOCATION (exp),
8653 VAR_DECL, NULL, TREE_TYPE (exp));
8654 DECL_ARTIFICIAL (val) = 1;
8655 DECL_IGNORED_P (val) = 1;
8657 TREE_OPERAND (exp, 0) = treeop0;
8658 SAVE_EXPR_RESOLVED_P (exp) = 1;
8660 if (!CONSTANT_P (ret))
8661 ret = copy_to_reg (ret);
8662 SET_DECL_RTL (val, ret);
8670 /* If we don't need the result, just ensure we evaluate any subexpressions. */
8674 unsigned HOST_WIDE_INT idx;
8677 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8678 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8683 return expand_constructor (exp, target, modifier, false);
8685 case TARGET_MEM_REF:
8687 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8688 struct mem_address addr;
8691 get_address_description (exp, &addr);
8692 op0 = addr_for_mem_ref (&addr, as, true);
8693 op0 = memory_address_addr_space (mode, op0, as);
8694 temp = gen_rtx_MEM (mode, op0);
8695 set_mem_attributes (temp, exp, 0);
8696 set_mem_addr_space (temp, as);
8697 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8698 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8700 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8701 /* If the target does not have special handling for unaligned
8702 loads of mode then it can use regular moves for them. */
8703 && ((icode = optab_handler (movmisalign_optab, mode))
8704 != CODE_FOR_nothing))
8708 /* We've already validated the memory, and we're creating a
8709 new pseudo destination. The predicates really can't fail. */
8710 reg = gen_reg_rtx (mode);
8712 /* Nor can the insn generator. */
8713 insn = GEN_FCN (icode) (reg, temp);
8714 gcc_assert (insn != NULL_RTX);
8725 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8726 enum machine_mode address_mode;
8727 tree base = TREE_OPERAND (exp, 0);
8730 /* Handle expansion of non-aliased memory with non-BLKmode. That
8731 might end up in a register. */
8732 if (TREE_CODE (base) == ADDR_EXPR)
8734 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8736 base = TREE_OPERAND (base, 0);
8740 base = get_addr_base_and_unit_offset (base, &off);
8744 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8745 decl we must use bitfield operations. */
8747 && !TREE_ADDRESSABLE (base)
8748 && DECL_MODE (base) != BLKmode
8749 && DECL_RTL_SET_P (base)
8750 && !MEM_P (DECL_RTL (base)))
8754 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8755 && (GET_MODE_BITSIZE (DECL_MODE (base))
8756 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8757 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8758 TREE_TYPE (exp), base),
8759 target, tmode, modifier);
8760 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8761 bftype = TREE_TYPE (base);
8762 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8763 bftype = TREE_TYPE (exp);
8764 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8766 TYPE_SIZE (TREE_TYPE (exp)),
8768 target, tmode, modifier);
8771 address_mode = targetm.addr_space.address_mode (as);
8772 base = TREE_OPERAND (exp, 0);
8773 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
8775 tree mask = gimple_assign_rhs2 (def_stmt);
8776 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
8777 gimple_assign_rhs1 (def_stmt), mask);
8778 TREE_OPERAND (exp, 0) = base;
8780 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8781 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8782 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
8783 op0 = memory_address_addr_space (address_mode, op0, as);
8784 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8787 = immed_double_int_const (mem_ref_offset (exp), address_mode);
8788 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
8790 op0 = memory_address_addr_space (mode, op0, as);
8791 temp = gen_rtx_MEM (mode, op0);
8792 set_mem_attributes (temp, exp, 0);
8793 set_mem_addr_space (temp, as);
8794 if (TREE_THIS_VOLATILE (exp))
8795 MEM_VOLATILE_P (temp) = 1;
8797 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8798 /* If the target does not have special handling for unaligned
8799 loads of mode then it can use regular moves for them. */
8800 && ((icode = optab_handler (movmisalign_optab, mode))
8801 != CODE_FOR_nothing))
8805 /* We've already validated the memory, and we're creating a
8806 new pseudo destination. The predicates really can't fail. */
8807 reg = gen_reg_rtx (mode);
8809 /* Nor can the insn generator. */
8810 insn = GEN_FCN (icode) (reg, temp);
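/* Illustration: for a load such as *(int *) p where P may be
   misaligned and SImode requires 4-byte alignment, a target that
   provides the movmisalignsi pattern has the load expanded through
   that pattern into a fresh pseudo instead of an ordinary
   (set (reg) (mem)) move.  */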
8821 tree array = treeop0;
8822 tree index = treeop1;
8824 /* Fold an expression like: "foo"[2].
8825 This is not done in fold so it won't happen inside &.
8826 Don't fold if this is for wide characters since it's too
8827 difficult to do correctly and this is a very rare case. */
8829 if (modifier != EXPAND_CONST_ADDRESS
8830 && modifier != EXPAND_INITIALIZER
8831 && modifier != EXPAND_MEMORY)
8833 tree t = fold_read_from_constant_string (exp);
8836 return expand_expr (t, target, tmode, modifier);
8839 /* If this is a constant index into a constant array,
8840 just get the value from the array. Handle both the cases when
8841 we have an explicit constructor and when our operand is a variable
8842 that was declared const. */
8844 if (modifier != EXPAND_CONST_ADDRESS
8845 && modifier != EXPAND_INITIALIZER
8846 && modifier != EXPAND_MEMORY
8847 && TREE_CODE (array) == CONSTRUCTOR
8848 && ! TREE_SIDE_EFFECTS (array)
8849 && TREE_CODE (index) == INTEGER_CST)
8851 unsigned HOST_WIDE_INT ix;
8854 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8856 if (tree_int_cst_equal (field, index))
8858 if (!TREE_SIDE_EFFECTS (value))
8859 return expand_expr (fold (value), target, tmode, modifier);
8864 else if (optimize >= 1
8865 && modifier != EXPAND_CONST_ADDRESS
8866 && modifier != EXPAND_INITIALIZER
8867 && modifier != EXPAND_MEMORY
8868 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8869 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8870 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8871 && const_value_known_p (array))
8873 if (TREE_CODE (index) == INTEGER_CST)
8875 tree init = DECL_INITIAL (array);
8877 if (TREE_CODE (init) == CONSTRUCTOR)
8879 unsigned HOST_WIDE_INT ix;
8882 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8884 if (tree_int_cst_equal (field, index))
8886 if (TREE_SIDE_EFFECTS (value))
8889 if (TREE_CODE (value) == CONSTRUCTOR)
8891 /* If VALUE is a CONSTRUCTOR, this
8892 optimization is only useful if
8893 this doesn't store the CONSTRUCTOR
8894 into memory. If it does, it is more
8895 efficient to just load the data from
8896 the array directly. */
8897 rtx ret = expand_constructor (value, target,
8899 if (ret == NULL_RTX)
8903 return expand_expr (fold (value), target, tmode,
8907 else if (TREE_CODE (init) == STRING_CST)
8909 tree index1 = index;
8910 tree low_bound = array_ref_low_bound (exp);
8911 index1 = fold_convert_loc (loc, sizetype,
8914 /* Optimize the special-case of a zero lower bound.
8916 We convert the low_bound to sizetype to avoid some problems
8917 with constant folding. (E.g. suppose the lower bound is 1,
8918 and its mode is QI. Without the conversion, (ARRAY
8919 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8920 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8922 if (! integer_zerop (low_bound))
8923 index1 = size_diffop_loc (loc, index1,
8924 fold_convert_loc (loc, sizetype,
8927 if (0 > compare_tree_int (index1,
8928 TREE_STRING_LENGTH (init)))
8930 tree type = TREE_TYPE (TREE_TYPE (init));
8931 enum machine_mode mode = TYPE_MODE (type);
8933 if (GET_MODE_CLASS (mode) == MODE_INT
8934 && GET_MODE_SIZE (mode) == 1)
8935 return gen_int_mode (TREE_STRING_POINTER (init)
8936 [TREE_INT_CST_LOW (index1)],
8943 goto normal_inner_ref;
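/* Illustration: given

     static const char msg[] = "hi";
     char c = msg[1];

   the INTEGER_CST index is looked up in the STRING_CST initializer
   by the code above, so the load folds to the constant 'i' and no
   memory reference is emitted.  */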
8946 /* If the operand is a CONSTRUCTOR, we can just extract the
8947 appropriate field if it is present. */
8948 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8950 unsigned HOST_WIDE_INT idx;
8953 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8955 if (field == treeop1
8956 /* We can normally use the value of the field in the
8957 CONSTRUCTOR. However, if this is a bitfield in
8958 an integral mode that we can fit in a HOST_WIDE_INT,
8959 we must mask only the number of bits in the bitfield,
8960 since this is done implicitly by the constructor. If
8961 the bitfield does not meet either of those conditions,
8962 we can't do this optimization. */
8963 && (! DECL_BIT_FIELD (field)
8964 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8965 && (GET_MODE_BITSIZE (DECL_MODE (field))
8966 <= HOST_BITS_PER_WIDE_INT))))
8968 if (DECL_BIT_FIELD (field)
8969 && modifier == EXPAND_STACK_PARM)
8971 op0 = expand_expr (value, target, tmode, modifier);
8972 if (DECL_BIT_FIELD (field))
8974 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8975 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8977 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8979 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8980 op0 = expand_and (imode, op0, op1, target);
8984 int count = GET_MODE_BITSIZE (imode) - bitsize;
8986 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8988 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8996 goto normal_inner_ref;
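/* Illustration of the sign-extending shift pair above: extracting a
   signed 3-bit field holding the bits 101 (-3) from SImode uses
   COUNT == 32 - 3 == 29, and

     (0x00000005 << 29) >> 29  ==  0xfffffffd  ==  -3

   since the arithmetic right shift replicates the field's sign bit.  */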
8999 case ARRAY_RANGE_REF:
9002 enum machine_mode mode1, mode2;
9003 HOST_WIDE_INT bitsize, bitpos;
9005 int volatilep = 0, must_force_mem;
9006 bool packedp = false;
9007 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9008 &mode1, &unsignedp, &volatilep, true);
9009 rtx orig_op0, memloc;
9011 /* If we got back the original object, something is wrong. Perhaps
9012 we are evaluating an expression too early. In any event, don't
9013 infinitely recurse. */
9014 gcc_assert (tem != exp);
9016 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9017 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9018 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9021 /* If TEM's type is a union of variable size, pass TARGET to the inner
9022 computation, since it will need a temporary and TARGET is known
9023 to be safe to use. This occurs in unchecked conversion in Ada. */
9026 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9027 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9029 && modifier != EXPAND_STACK_PARM
9030 ? target : NULL_RTX),
9032 (modifier == EXPAND_INITIALIZER
9033 || modifier == EXPAND_CONST_ADDRESS
9034 || modifier == EXPAND_STACK_PARM)
9035 ? modifier : EXPAND_NORMAL);
9038 /* If the bitfield is volatile, we want to access it in the
9039 field's mode, not the computed mode.
9040 If a MEM has VOIDmode (external with incomplete type),
9041 use BLKmode for it instead. */
9044 if (volatilep && flag_strict_volatile_bitfields > 0)
9045 op0 = adjust_address (op0, mode1, 0);
9046 else if (GET_MODE (op0) == VOIDmode)
9047 op0 = adjust_address (op0, BLKmode, 0);
9051 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9053 /* If we have either an offset, a BLKmode result, or a reference
9054 outside the underlying object, we must force it to memory.
9055 Such a case can occur in Ada if we have unchecked conversion
9056 of an expression from a scalar type to an aggregate type or
9057 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9058 passed a partially uninitialized object or a view-conversion
9059 to a larger size. */
9060 must_force_mem = (offset
9062 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9064 /* Handle CONCAT first. */
9065 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9068 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9071 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9074 op0 = XEXP (op0, 0);
9075 mode2 = GET_MODE (op0);
9077 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9078 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9082 op0 = XEXP (op0, 1);
9084 mode2 = GET_MODE (op0);
9087 /* Otherwise force into memory. */
9091 /* If this is a constant, put it in a register if it is a legitimate
9092 constant and we don't need a memory reference. */
9093 if (CONSTANT_P (op0)
9095 && targetm.legitimate_constant_p (mode2, op0)
9097 op0 = force_reg (mode2, op0);
9099 /* Otherwise, if this is a constant, try to force it to the constant
9100 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9101 is a legitimate constant. */
9102 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9103 op0 = validize_mem (memloc);
9105 /* Otherwise, if this is a constant or the object is not in memory
9106 and needs to be, put it there. */
9107 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9109 tree nt = build_qualified_type (TREE_TYPE (tem),
9110 (TYPE_QUALS (TREE_TYPE (tem))
9111 | TYPE_QUAL_CONST));
9112 memloc = assign_temp (nt, 1, 1, 1);
9113 emit_move_insn (memloc, op0);
9119 enum machine_mode address_mode;
9120 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9123 gcc_assert (MEM_P (op0));
9126 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9127 if (GET_MODE (offset_rtx) != address_mode)
9128 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9130 if (GET_MODE (op0) == BLKmode
9131 /* A constant address in OP0 can have VOIDmode; we must
9132 not try to call force_reg in that case. */
9133 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9135 && (bitpos % bitsize) == 0
9136 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9137 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9139 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9143 op0 = offset_address (op0, offset_rtx,
9144 highest_pow2_factor (offset));
9147 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9148 record its alignment as BIGGEST_ALIGNMENT. */
9149 if (MEM_P (op0) && bitpos == 0 && offset != 0
9150 && is_aligning_offset (offset, tem))
9151 set_mem_align (op0, BIGGEST_ALIGNMENT);
9153 /* Don't forget about volatility even if this is a bitfield. */
9154 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9156 if (op0 == orig_op0)
9157 op0 = copy_rtx (op0);
9159 MEM_VOLATILE_P (op0) = 1;
9162 /* In cases where an aligned union has an unaligned object
9163 as a field, we might be extracting a BLKmode value from
9164 an integer-mode (e.g., SImode) object. Handle this case
9165 by doing the extract into an object as wide as the field
9166 (which we know to be the width of a basic mode), then
9167 storing into memory, and changing the mode to BLKmode. */
9168 if (mode1 == VOIDmode
9169 || REG_P (op0) || GET_CODE (op0) == SUBREG
9170 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9171 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9172 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9173 && modifier != EXPAND_CONST_ADDRESS
9174 && modifier != EXPAND_INITIALIZER)
9175 /* If the field is volatile, we always want an aligned
9176 access. Only do this if the access is not already naturally
9177 aligned, otherwise "normal" (non-bitfield) volatile fields
9178 become non-addressable. */
9179 || (volatilep && flag_strict_volatile_bitfields > 0
9180 && (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
9181 /* If the field isn't aligned enough to fetch as a memref,
9182 fetch it as a bit field. */
9183 || (mode1 != BLKmode
9184 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9185 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9187 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9188 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9189 && ((modifier == EXPAND_CONST_ADDRESS
9190 || modifier == EXPAND_INITIALIZER)
9192 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9193 || (bitpos % BITS_PER_UNIT != 0)))
9194 /* If the type and the field are a constant size and the
9195 size of the type isn't the same size as the bitfield,
9196 we must use bitfield operations. */
9198 && TYPE_SIZE (TREE_TYPE (exp))
9199 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9200 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9203 enum machine_mode ext_mode = mode;
9205 if (ext_mode == BLKmode
9206 && ! (target != 0 && MEM_P (op0)
9208 && bitpos % BITS_PER_UNIT == 0))
9209 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9211 if (ext_mode == BLKmode)
9214 target = assign_temp (type, 0, 1, 1);
9219 /* In this case, BITPOS must start at a byte boundary and
9220 TARGET, if specified, must be a MEM. */
9221 gcc_assert (MEM_P (op0)
9222 && (!target || MEM_P (target))
9223 && !(bitpos % BITS_PER_UNIT));
9225 emit_block_move (target,
9226 adjust_address (op0, VOIDmode,
9227 bitpos / BITS_PER_UNIT),
9228 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9230 (modifier == EXPAND_STACK_PARM
9231 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9236 op0 = validize_mem (op0);
9238 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9239 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9241 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
9242 (modifier == EXPAND_STACK_PARM
9243 ? NULL_RTX : target),
9244 ext_mode, ext_mode);
9246 /* If the result is a record type and BITSIZE is narrower than
9247 the mode of OP0, an integral mode, and this is a big endian
9248 machine, we must put the field into the high-order bits. */
9249 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9250 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9251 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9252 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9253 GET_MODE_BITSIZE (GET_MODE (op0))
9256 /* If the result type is BLKmode, store the data into a temporary
9257 of the appropriate type, but with the mode corresponding to the
9258 mode for the data we have (op0's mode). It's tempting to make
9259 this a constant type, since we know it's only being stored once,
9260 but that can cause problems if we are taking the address of this
9261 COMPONENT_REF because the MEM of any reference via that address
9262 will have flags corresponding to the type, which will not
9263 necessarily be constant. */
9264 if (mode == BLKmode)
9266 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9269 /* If the reference doesn't use the alias set of its type,
9270 we cannot create the temporary using that type. */
9271 if (component_uses_parent_alias_set (exp))
9273 new_rtx = assign_stack_local (ext_mode, size, 0);
9274 set_mem_alias_set (new_rtx, get_alias_set (exp));
9277 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9279 emit_move_insn (new_rtx, op0);
9280 op0 = copy_rtx (new_rtx);
9281 PUT_MODE (op0, BLKmode);
9282 set_mem_attributes (op0, exp, 1);
9288 /* If the result is BLKmode, use that to access the object out of a MEM. */
9290 if (mode == BLKmode)
9293 /* Get a reference to just this component. */
9294 if (modifier == EXPAND_CONST_ADDRESS
9295 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9296 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9298 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9300 if (op0 == orig_op0)
9301 op0 = copy_rtx (op0);
9303 set_mem_attributes (op0, exp, 0);
9304 if (REG_P (XEXP (op0, 0)))
9305 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9307 MEM_VOLATILE_P (op0) |= volatilep;
9308 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9309 || modifier == EXPAND_CONST_ADDRESS
9310 || modifier == EXPAND_INITIALIZER)
9312 else if (target == 0)
9313 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9315 convert_move (target, op0, unsignedp);
9320 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9323 /* All valid uses of __builtin_va_arg_pack () are removed during inlining. */
9325 if (CALL_EXPR_VA_ARG_PACK (exp))
9326 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9328 tree fndecl = get_callee_fndecl (exp), attr;
9331 && (attr = lookup_attribute ("error",
9332 DECL_ATTRIBUTES (fndecl))) != NULL)
9333 error ("%Kcall to %qs declared with attribute error: %s",
9334 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9335 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9337 && (attr = lookup_attribute ("warning",
9338 DECL_ATTRIBUTES (fndecl))) != NULL)
9339 warning_at (tree_nonartificial_location (exp),
9340 0, "%Kcall to %qs declared with attribute warning: %s",
9341 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9342 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9344 /* Check for a built-in function. */
9345 if (fndecl && DECL_BUILT_IN (fndecl))
9347 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9348 return expand_builtin (exp, target, subtarget, tmode, ignore);
9351 return expand_call (exp, target, ignore);
9353 case VIEW_CONVERT_EXPR:
9356 /* If we are converting to BLKmode, try to avoid an intermediate
9357 temporary by fetching an inner memory reference. */
9359 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9360 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9361 && handled_component_p (treeop0))
9363 enum machine_mode mode1;
9364 HOST_WIDE_INT bitsize, bitpos;
9369 = get_inner_reference (treeop0, &bitsize, &bitpos,
9370 &offset, &mode1, &unsignedp, &volatilep,
9374 /* ??? We should work harder and deal with non-zero offsets. */
9376 && (bitpos % BITS_PER_UNIT) == 0
9378 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9380 /* See the normal_inner_ref case for the rationale. */
9383 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9384 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9386 && modifier != EXPAND_STACK_PARM
9387 ? target : NULL_RTX),
9389 (modifier == EXPAND_INITIALIZER
9390 || modifier == EXPAND_CONST_ADDRESS
9391 || modifier == EXPAND_STACK_PARM)
9392 ? modifier : EXPAND_NORMAL);
9394 if (MEM_P (orig_op0))
9398 /* Get a reference to just this component. */
9399 if (modifier == EXPAND_CONST_ADDRESS
9400 || modifier == EXPAND_SUM
9401 || modifier == EXPAND_INITIALIZER)
9402 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9404 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9406 if (op0 == orig_op0)
9407 op0 = copy_rtx (op0);
9409 set_mem_attributes (op0, treeop0, 0);
9410 if (REG_P (XEXP (op0, 0)))
9411 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9413 MEM_VOLATILE_P (op0) |= volatilep;
9419 op0 = expand_expr (treeop0,
9420 NULL_RTX, VOIDmode, modifier);
9422 /* If the input and output modes are both the same, we are done. */
9423 if (mode == GET_MODE (op0))
9425 /* If neither mode is BLKmode, and both modes are the same size
9426 then we can use gen_lowpart. */
9427 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9428 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9429 && !COMPLEX_MODE_P (GET_MODE (op0)))
9431 if (GET_CODE (op0) == SUBREG)
9432 op0 = force_reg (GET_MODE (op0), op0);
9433 temp = gen_lowpart_common (mode, op0);
9438 if (!REG_P (op0) && !MEM_P (op0))
9439 op0 = force_reg (GET_MODE (op0), op0);
9440 op0 = gen_lowpart (mode, op0);
9443 /* If both types are integral, convert from one mode to the other. */
9444 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9445 op0 = convert_modes (mode, GET_MODE (op0), op0,
9446 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9447 /* As a last resort, spill op0 to memory, and reload it in a different mode. */
9449 else if (!MEM_P (op0))
9451 /* If the operand is not a MEM, force it into memory. Since we
9452 are going to be changing the mode of the MEM, don't call
9453 force_const_mem for constants because we don't allow pool
9454 constants to change mode. */
9455 tree inner_type = TREE_TYPE (treeop0);
9457 gcc_assert (!TREE_ADDRESSABLE (exp));
9459 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9461 = assign_stack_temp_for_type
9462 (TYPE_MODE (inner_type),
9463 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9465 emit_move_insn (target, op0);
9469 /* At this point, OP0 is in the correct mode. If the output type is
9470 such that the operand is known to be aligned, indicate that it is.
9471 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
9475 op0 = copy_rtx (op0);
9477 if (TYPE_ALIGN_OK (type))
9478 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9479 else if (STRICT_ALIGNMENT
9481 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9483 tree inner_type = TREE_TYPE (treeop0);
9484 HOST_WIDE_INT temp_size
9485 = MAX (int_size_in_bytes (inner_type),
9486 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9488 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9489 rtx new_with_op0_mode
9490 = adjust_address (new_rtx, GET_MODE (op0), 0);
9492 gcc_assert (!TREE_ADDRESSABLE (exp));
9494 if (GET_MODE (op0) == BLKmode)
9495 emit_block_move (new_with_op0_mode, op0,
9496 GEN_INT (GET_MODE_SIZE (mode)),
9497 (modifier == EXPAND_STACK_PARM
9498 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9500 emit_move_insn (new_with_op0_mode, op0);
9505 op0 = adjust_address (op0, mode, 0);
9510 /* Use a compare and a jump for BLKmode comparisons, or for function
9511 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9513 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9514 are occasionally created by folding during expansion. */
9515 case TRUTH_ANDIF_EXPR:
9516 case TRUTH_ORIF_EXPR:
9519 || modifier == EXPAND_STACK_PARM
9520 || ! safe_from_p (target, treeop0, 1)
9521 || ! safe_from_p (target, treeop1, 1)
9522 /* Make sure we don't have a hard reg (such as function's return
9523 value) live across basic blocks, if not optimizing. */
9524 || (!optimize && REG_P (target)
9525 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9526 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9529 emit_move_insn (target, const0_rtx);
9531 op1 = gen_label_rtx ();
9532 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9535 emit_move_insn (target, const1_rtx);
9538 return ignore ? const0_rtx : target;
9540 case STATEMENT_LIST:
9542 tree_stmt_iterator iter;
9544 gcc_assert (ignore);
9546 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9547 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9552 /* A COND_EXPR with its type being VOID_TYPE represents a
9553 conditional jump and is handled in
9554 expand_gimple_cond_expr. */
9555 gcc_assert (!VOID_TYPE_P (type));
9557 /* Note that COND_EXPRs whose type is a structure or union
9558 are required to be constructed to contain assignments of
9559 a temporary variable, so that we can evaluate them here
9560 for side effect only. If type is void, we must do likewise. */
9562 gcc_assert (!TREE_ADDRESSABLE (type)
9564 && TREE_TYPE (treeop1) != void_type_node
9565 && TREE_TYPE (treeop2) != void_type_node);
9567 /* If we are not to produce a result, we have no target. Otherwise,
9568 if a target was specified use it; it will not be used as an
9569 intermediate target unless it is safe. If no target, use a
9572 if (modifier != EXPAND_STACK_PARM
9574 && safe_from_p (original_target, treeop0, 1)
9575 && GET_MODE (original_target) == mode
9576 #ifdef HAVE_conditional_move
9577 && (! can_conditionally_move_p (mode)
9578 || REG_P (original_target))
9580 && !MEM_P (original_target))
9581 temp = original_target;
9583 temp = assign_temp (type, 0, 0, 1);
9585 do_pending_stack_adjust ();
9587 op0 = gen_label_rtx ();
9588 op1 = gen_label_rtx ();
9589 jumpifnot (treeop0, op0, -1);
9590 store_expr (treeop1, temp,
9591 modifier == EXPAND_STACK_PARM,
9594 emit_jump_insn (gen_jump (op1));
9597 store_expr (treeop2, temp,
9598 modifier == EXPAND_STACK_PARM,
9606 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9613 gcc_assert (ignore);
9615 /* Check for |= or &= of a bitfield of size one into another bitfield
9616 of size 1. In this case, (unless we need the result of the
9617 assignment) we can do this more efficiently with a
9618 test followed by an assignment, if necessary.
9620 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9621 things change so we do, this code should be enhanced to support it. */
9623 if (TREE_CODE (lhs) == COMPONENT_REF
9624 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9625 || TREE_CODE (rhs) == BIT_AND_EXPR)
9626 && TREE_OPERAND (rhs, 0) == lhs
9627 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9628 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9629 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9631 rtx label = gen_label_rtx ();
9632 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9633 do_jump (TREE_OPERAND (rhs, 1),
9635 value ? 0 : label, -1);
9636 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9637 MOVE_NONTEMPORAL (exp));
9638 do_pending_stack_adjust ();
9643 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9648 return expand_expr_addr_expr (exp, target, tmode, modifier);
9651 op0 = expand_normal (treeop0);
9652 return read_complex_part (op0, false);
9655 op0 = expand_normal (treeop0);
9656 return read_complex_part (op0, true);
9663 /* Expanded in cfgexpand.c. */
9666 case TRY_CATCH_EXPR:
9668 case EH_FILTER_EXPR:
9669 case TRY_FINALLY_EXPR:
9670 /* Lowered by tree-eh.c. */
9673 case WITH_CLEANUP_EXPR:
9674 case CLEANUP_POINT_EXPR:
9676 case CASE_LABEL_EXPR:
9682 case PREINCREMENT_EXPR:
9683 case PREDECREMENT_EXPR:
9684 case POSTINCREMENT_EXPR:
9685 case POSTDECREMENT_EXPR:
9688 /* Lowered by gimplify.c. */
9692 /* Function descriptors are not valid except as
9693 initialization constants, and should not be expanded. */
9696 case WITH_SIZE_EXPR:
9697 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9698 have pulled out the size to use in whatever context it needed. */
9699 return expand_expr_real (treeop0, original_target, tmode,
9702 case COMPOUND_LITERAL_EXPR:
9704 /* Initialize the anonymous variable declared in the compound
9705 literal, then return the variable. */
9706 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9708 /* Create RTL for this variable. */
9709 if (!DECL_RTL_SET_P (decl))
9711 if (DECL_HARD_REGISTER (decl))
9712 /* The user specified an assembler name for this variable. Set that up now. */
9714 rest_of_decl_compilation (decl, 0, 0);
9719 return expand_expr_real (decl, original_target, tmode,
9724 return expand_expr_real_2 (&ops, target, tmode, modifier);
9728 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9729 signedness of TYPE), possibly returning the result in TARGET. */
9731 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9733 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9734 if (target && GET_MODE (target) != GET_MODE (exp))
9736 /* For constant values, reduce using build_int_cst_type. */
9737 if (CONST_INT_P (exp))
9739 HOST_WIDE_INT value = INTVAL (exp);
9740 tree t = build_int_cst_type (type, value);
9741 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9743 else if (TYPE_UNSIGNED (type))
9745 rtx mask = immed_double_int_const (double_int_mask (prec),
9747 return expand_and (GET_MODE (exp), exp, mask, target);
9751 int count = GET_MODE_BITSIZE (GET_MODE (exp)) - prec;
9752 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
9753 exp, count, target, 0);
9754 return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
9755 exp, count, target, 0);
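/* Illustration: reducing to a 6-bit bit-field type in SImode.
   Unsigned case: mask with (1 << 6) - 1, e.g. 0x7f & 0x3f == 0x3f.
   Signed case: COUNT == 32 - 6 == 26, so 0x2a (101010, sign bit set
   for 6 bits) becomes (0x2a << 26) >> 26 == -22, the correctly
   sign-extended value.  */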
9759 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9760 when applied to the address of EXP produces an address known to be
9761 aligned more than BIGGEST_ALIGNMENT. */
9764 is_aligning_offset (const_tree offset, const_tree exp)
9766 /* Strip off any conversions. */
9767 while (CONVERT_EXPR_P (offset))
9768 offset = TREE_OPERAND (offset, 0);
9770 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9771 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9772 if (TREE_CODE (offset) != BIT_AND_EXPR
9773 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9774 || compare_tree_int (TREE_OPERAND (offset, 1),
9775 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9776 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9779 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9780 It must be NEGATE_EXPR. Then strip any more conversions. */
9781 offset = TREE_OPERAND (offset, 0);
9782 while (CONVERT_EXPR_P (offset))
9783 offset = TREE_OPERAND (offset, 0);
9785 if (TREE_CODE (offset) != NEGATE_EXPR)
9788 offset = TREE_OPERAND (offset, 0);
9789 while (CONVERT_EXPR_P (offset))
9790 offset = TREE_OPERAND (offset, 0);
9792 /* This must now be the address of EXP. */
9793 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
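/* Illustration: an offset of the shape matched above typically comes
   from rounding an address up to an alignment boundary, since

     aligned = addr + ((- addr) & (ALIGN - 1))

   is the smallest multiple of ALIGN that is >= ADDR when ALIGN is a
   power of 2.  If ALIGN - 1 exceeds BIGGEST_ALIGNMENT in bytes, the
   resulting address is therefore at least BIGGEST_ALIGNMENT-aligned.  */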
9796 /* Return the tree node if an ARG corresponds to a string constant or zero
9797 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9798 in bytes within the string that ARG is accessing. The type of the
9799 offset will be `sizetype'. */
9802 string_constant (tree arg, tree *ptr_offset)
9804 tree array, offset, lower_bound;
9807 if (TREE_CODE (arg) == ADDR_EXPR)
9809 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9811 *ptr_offset = size_zero_node;
9812 return TREE_OPERAND (arg, 0);
9814 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9816 array = TREE_OPERAND (arg, 0);
9817 offset = size_zero_node;
9819 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9821 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9822 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9823 if (TREE_CODE (array) != STRING_CST
9824 && TREE_CODE (array) != VAR_DECL)
9827 /* Check if the array has a nonzero lower bound. */
9828 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9829 if (!integer_zerop (lower_bound))
9831 /* If the offset and base aren't both constants, return 0. */
9832 if (TREE_CODE (lower_bound) != INTEGER_CST)
9834 if (TREE_CODE (offset) != INTEGER_CST)
9836 /* Adjust offset by the lower bound. */
9837 offset = size_diffop (fold_convert (sizetype, offset),
9838 fold_convert (sizetype, lower_bound));
9844 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9846 tree arg0 = TREE_OPERAND (arg, 0);
9847 tree arg1 = TREE_OPERAND (arg, 1);
9852 if (TREE_CODE (arg0) == ADDR_EXPR
9853 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9854 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9856 array = TREE_OPERAND (arg0, 0);
9859 else if (TREE_CODE (arg1) == ADDR_EXPR
9860 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9861 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9863 array = TREE_OPERAND (arg1, 0);
9872 if (TREE_CODE (array) == STRING_CST)
9874 *ptr_offset = fold_convert (sizetype, offset);
9877 else if (TREE_CODE (array) == VAR_DECL
9878 || TREE_CODE (array) == CONST_DECL)
9882 /* Variables initialized to string literals can be handled too. */
9883 if (!const_value_known_p (array)
9884 || !DECL_INITIAL (array)
9885 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9888 /* Avoid const char foo[4] = "abcde"; */
9889 if (DECL_SIZE_UNIT (array) == NULL_TREE
9890 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9891 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9892 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9895 /* If variable is bigger than the string literal, OFFSET must be constant
9896 and inside of the bounds of the string literal. */
9897 offset = fold_convert (sizetype, offset);
9898 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9899 && (! host_integerp (offset, 1)
9900 || compare_tree_int (offset, length) >= 0))
9903 *ptr_offset = offset;
9904 return DECL_INITIAL (array);
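/* Illustration: for

     static const char greet[] = "hello";
     ... strlen (greet + 2) ...

   this function returns the STRING_CST "hello" with *PTR_OFFSET set
   to 2, which lets callers such as the strlen folder reduce the call
   to the constant 3.  */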
9910 /* Generate code to calculate OPS, an exploded expression,
9911 using a store-flag instruction, and return an rtx for the result.
9912 OPS reflects a comparison.
9914 If TARGET is nonzero, store the result there if convenient.
9916 Return zero if there is no suitable set-flag instruction
9917 available on this machine.
9919 Once expand_expr has been called on the arguments of the comparison,
9920 we are committed to doing the store flag, since it is not safe to
9921 re-evaluate the expression. We emit the store-flag insn by calling
9922 emit_store_flag, but only expand the arguments if we have a reason
9923 to believe that emit_store_flag will be successful. If we think that
9924 it will, but it isn't, we have to simulate the store-flag with a
9925 set/jump/set sequence. */
9928 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9931 tree arg0, arg1, type;
9933 enum machine_mode operand_mode;
9936 rtx subtarget = target;
9937 location_t loc = ops->location;
9942 /* Don't crash if the comparison was erroneous. */
9943 if (arg0 == error_mark_node || arg1 == error_mark_node)
9946 type = TREE_TYPE (arg0);
9947 operand_mode = TYPE_MODE (type);
9948 unsignedp = TYPE_UNSIGNED (type);
9950 /* We won't bother with BLKmode store-flag operations because it would mean
9951 passing a lot of information to emit_store_flag. */
9952 if (operand_mode == BLKmode)
9955 /* We won't bother with store-flag operations involving function pointers
9956 when function pointers must be canonicalized before comparisons. */
9957 #ifdef HAVE_canonicalize_funcptr_for_compare
9958 if (HAVE_canonicalize_funcptr_for_compare
9959 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9960 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9962 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9963 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9964 == FUNCTION_TYPE))))
9971 /* Get the rtx comparison code to use. We know that EXP is a comparison
9972 operation of some type. Some comparisons against 1 and -1 can be
9973 converted to comparisons with zero. Do so here so that the tests
9974 below will be aware that we have a comparison with zero. These
9975 tests will not catch constants in the first operand, but constants
9976 are rarely passed as the first operand. */
9987 if (integer_onep (arg1))
9988 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9990 code = unsignedp ? LTU : LT;
9993 if (! unsignedp && integer_all_onesp (arg1))
9994 arg1 = integer_zero_node, code = LT;
9996 code = unsignedp ? LEU : LE;
9999 if (! unsignedp && integer_all_onesp (arg1))
10000 arg1 = integer_zero_node, code = GE;
10002 code = unsignedp ? GTU : GT;
10005 if (integer_onep (arg1))
10006 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10008 code = unsignedp ? GEU : GE;
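/* Illustration of the conversions above, signed case:

     x <   1   becomes   x <= 0
     x <= -1   becomes   x <  0
     x >  -1   becomes   x >= 0
     x >=  1   becomes   x >  0

   so the tests below only need to recognize comparisons against
   zero.  */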
10011 case UNORDERED_EXPR:
10037 gcc_unreachable ();
10040 /* Put a constant second. */
10041 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10042 || TREE_CODE (arg0) == FIXED_CST)
10044 tem = arg0; arg0 = arg1; arg1 = tem;
10045 code = swap_condition (code);
10048 /* If this is an equality or inequality test of a single bit, we can
10049 do this by shifting the bit being tested to the low-order bit and
10050 masking the result with the constant 1. If the condition was EQ,
10051 we xor it with 1. This does not require an scc insn and is faster
10052 than an scc insn even if we have it.
10054 The code to make this transformation was moved into fold_single_bit_test,
10055 so we just call into the folder and expand its result. */
10057 if ((code == NE || code == EQ)
10058 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10059 && integer_pow2p (TREE_OPERAND (arg0, 1))
10060 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10062 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10063 return expand_expr (fold_single_bit_test (loc,
10064 code == NE ? NE_EXPR : EQ_EXPR,
10066 target, VOIDmode, EXPAND_NORMAL);
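/* Illustration: with a power-of-2 mask, the folded form is in effect

     (x & 8) != 0   ==>   (x >> 3) & 1
     (x & 8) == 0   ==>   ((x >> 3) & 1) ^ 1

   needing only a shift, an AND and possibly an XOR.  */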
10069 if (! get_subtarget (target)
10070 || GET_MODE (subtarget) != operand_mode)
10073 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10076 target = gen_reg_rtx (mode);
10078 /* Try a cstore if possible. */
10079 return emit_store_flag_force (target, code, op0, op1,
10080 operand_mode, unsignedp,
10081 (TYPE_PRECISION (ops->type) == 1
10082 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10086 /* Stubs in case we haven't got a casesi insn. */
10087 #ifndef HAVE_casesi
10088 # define HAVE_casesi 0
10089 # define gen_casesi(a, b, c, d, e) (0)
10090 # define CODE_FOR_casesi CODE_FOR_nothing
10093 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10094 0 otherwise (i.e. if there is no casesi instruction). */
10096 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10097 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10098 rtx fallback_label ATTRIBUTE_UNUSED)
10100 struct expand_operand ops[5];
10101 enum machine_mode index_mode = SImode;
10102 int index_bits = GET_MODE_BITSIZE (index_mode);
10103 rtx op1, op2, index;
10108 /* Convert the index to SImode. */
10109 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10111 enum machine_mode omode = TYPE_MODE (index_type);
10112 rtx rangertx = expand_normal (range);
10114 /* We must handle the endpoints in the original mode. */
10115 index_expr = build2 (MINUS_EXPR, index_type,
10116 index_expr, minval);
10117 minval = integer_zero_node;
10118 index = expand_normal (index_expr);
10120 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10121 omode, 1, default_label);
10122 /* Now we can safely truncate. */
10123 index = convert_to_mode (index_mode, index, 0);
10127 if (TYPE_MODE (index_type) != index_mode)
10129 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10130 index_expr = fold_convert (index_type, index_expr);
10133 index = expand_normal (index_expr);
10136 do_pending_stack_adjust ();
10138 op1 = expand_normal (minval);
10139 op2 = expand_normal (range);
10141 create_input_operand (&ops[0], index, index_mode);
10142 create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10143 create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10144 create_fixed_operand (&ops[3], table_label);
10145 create_fixed_operand (&ops[4], (default_label
10147 : fallback_label));
10148 expand_jump_insn (CODE_FOR_casesi, 5, ops);
10152 /* Attempt to generate a tablejump instruction; same concept. */
10153 #ifndef HAVE_tablejump
10154 #define HAVE_tablejump 0
10155 #define gen_tablejump(x, y) (0)
10158 /* Subroutine of the next function.
10160 INDEX is the value being switched on, with the lowest value
10161 in the table already subtracted.
10162 MODE is its expected mode (needed if INDEX is constant).
10163 RANGE is the length of the jump table.
10164 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10166 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10167 index value is out of range. */
10170 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10175 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10176 cfun->cfg->max_jumptable_ents = INTVAL (range);
10178 /* Do an unsigned comparison (in the proper mode) between the index
10179 expression and the value which represents the length of the range.
10180 Since we just finished subtracting the lower bound of the range
10181 from the index expression, this comparison allows us to simultaneously
10182 check that the original index expression value is both greater than
10183 or equal to the minimum value of the range and less than or equal to
10184 the maximum value of the range. */
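/* Illustration: for "switch (i)" with cases 3 through 10, INDEX
   holds i - 3 and RANGE is 7, so the single unsigned test

     (unsigned) (i - 3) > 7U

   rejects both i < 3 (the subtraction wraps to a huge value) and
   i > 10 in one comparison.  */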
10187 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10190 /* If index is in range, it must fit in Pmode.
10191 Convert to Pmode so we can index with it. */
10193 index = convert_to_mode (Pmode, index, 1);
10195 /* Don't let a MEM slip through, because then INDEX that comes
10196 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10197 and break_out_memory_refs will go to work on it and mess it up. */
10198 #ifdef PIC_CASE_VECTOR_ADDRESS
10199 if (flag_pic && !REG_P (index))
10200 index = copy_to_mode_reg (Pmode, index);
10203 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10204 GET_MODE_SIZE, because this indicates how large insns are. The other
10205 uses should all be Pmode, because they are addresses. This code
10206 could fail if addresses and insns are not the same size. */
10207 index = gen_rtx_PLUS (Pmode,
10208 gen_rtx_MULT (Pmode, index,
10209 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10210 gen_rtx_LABEL_REF (Pmode, table_label));
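/* Illustration: on a target whose CASE_VECTOR_MODE is SImode the RTL
   built above is

     (plus:P (mult:P (reg:P index) (const_int 4))
             (label_ref:P table_label))

   i.e. the address of entry INDEX is TABLE_LABEL plus INDEX times the
   4-byte entry size.  */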
10211 #ifdef PIC_CASE_VECTOR_ADDRESS
10213 index = PIC_CASE_VECTOR_ADDRESS (index);
10216 index = memory_address (CASE_VECTOR_MODE, index);
10217 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10218 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10219 convert_move (temp, vector, 0);
10221 emit_jump_insn (gen_tablejump (temp, table_label));
10223 /* If we are generating PIC code or if the table is PC-relative, the
10224 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10225 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10230 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10231 rtx table_label, rtx default_label)
10235 if (! HAVE_tablejump)
10238 index_expr = fold_build2 (MINUS_EXPR, index_type,
10239 fold_convert (index_type, index_expr),
10240 fold_convert (index_type, minval));
10241 index = expand_normal (index_expr);
10242 do_pending_stack_adjust ();
10244 do_tablejump (index, TYPE_MODE (index_type),
10245 convert_modes (TYPE_MODE (index_type),
10246 TYPE_MODE (TREE_TYPE (range)),
10247 expand_normal (range),
10248 TYPE_UNSIGNED (TREE_TYPE (range))),
10249 table_label, default_label);
10253 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10255 const_vector_from_tree (tree exp)
10260 enum machine_mode inner, mode;
10262 mode = TYPE_MODE (TREE_TYPE (exp));
10264 if (initializer_zerop (exp))
10265 return CONST0_RTX (mode);
10267 units = GET_MODE_NUNITS (mode);
10268 inner = GET_MODE_INNER (mode);
10270 v = rtvec_alloc (units);
10272 link = TREE_VECTOR_CST_ELTS (exp);
10273 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10275 elt = TREE_VALUE (link);
10277 if (TREE_CODE (elt) == REAL_CST)
10278 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10280 else if (TREE_CODE (elt) == FIXED_CST)
10281 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10284 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10288 /* Initialize remaining elements to 0. */
10289 for (; i < units; ++i)
10290 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10292 return gen_rtx_CONST_VECTOR (mode, v);
10295 /* Build a decl for a personality function given a language prefix. */
10298 build_personality_function (const char *lang)
10300 const char *unwind_and_version;
10304 switch (targetm_common.except_unwind_info (&global_options))
10309 unwind_and_version = "_sj0";
10313 unwind_and_version = "_v0";
10316 gcc_unreachable ();
10319 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
10321 type = build_function_type_list (integer_type_node, integer_type_node,
10322 long_long_unsigned_type_node,
10323 ptr_type_node, ptr_type_node, NULL_TREE);
10324 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10325 get_identifier (name), type);
10326 DECL_ARTIFICIAL (decl) = 1;
10327 DECL_EXTERNAL (decl) = 1;
10328 TREE_PUBLIC (decl) = 1;
10330 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10331 are the flags assigned by targetm.encode_section_info. */
10332 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
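/* Illustration: build_personality_function ("gxx") yields a decl
   named __gxx_personality_v0 for DWARF unwinding, or
   __gxx_personality_sj0 when setjmp/longjmp exceptions are in
   use.  */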
10337 /* Extracts the personality function of DECL and returns the corresponding libfunc. */
10341 get_personality_function (tree decl)
10343 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10344 enum eh_personality_kind pk;
10346 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10347 if (pk == eh_personality_none)
10351 && pk == eh_personality_any)
10352 personality = lang_hooks.eh_personality ();
10354 if (pk == eh_personality_lang)
10355 gcc_assert (personality != NULL_TREE);
10357 return XEXP (DECL_RTL (personality), 0);
10360 #include "gt-expr.h"