/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
24 #include "coretypes.h"
31 #include "hard-reg-set.h"
34 #include "insn-config.h"
35 #include "insn-attr.h"
36 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
43 #include "typeclass.h"
45 #include "langhooks.h"
48 #include "tree-iterator.h"
49 #include "tree-pass.h"
50 #include "tree-flow.h"
54 #include "diagnostic.h"
55 #include "ssaexpand.h"
56 #include "target-globals.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED  /* If it's last to first.  */
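/* Note: `defined' evaluates to 0 or 1, so the `!=' test above fires
   exactly when one of the stack and the args grows downward and the
   other does not, i.e. when they grow in opposite directions.  */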
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces_d
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, alias_set_type, bool);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
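/* As a hypothetical example: copying SIZE == 8 bytes with word (32-bit)
   alignment on a 32-bit target costs two SImode moves, so the copy is
   expanded inline only when the target's MOVE_RATIO exceeds 2.  */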
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
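/* The four *_BY_PIECES_P macros above differ only in the cost ratio
   consulted: MOVE_RATIO for structure copies and constant-string
   "memcpy" stores, CLEAR_RATIO for clearing, and SET_RATIO for
   "memset" with a nonzero value.  */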
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
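/* So by default, unaligned accesses are deemed slow exactly on
   strict-alignment targets; targets can override this with a more
   precise cost test.  */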
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

init_expr_target (void)
  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);
  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
            if (! HARD_REGNO_MODE_OK (regno, mode))

            SET_REGNO (reg, regno);

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
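  /* On exit, float_extend_from_mem[TO][FROM] records, for each pair of
     scalar float modes, whether the FROM->TO extension insn can take its
     input operand directly from memory.  */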
/* This is run at the start of compiling a function.  */

  memset (&crtl->expr, 0, sizeof (crtl->expr));
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (rtx to, rtx from, int unsignedp)
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
      insns = get_insns ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
          emit_unop_insn (convert_optab_handler (sext_optab, full_mode,

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
        expand_fixed_convert (to, from, 0, 1);
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
          rtx word_to = gen_reg_rtx (word_mode);

          if (reg_overlap_mentioned_p (to, from))
            from = force_reg (from_mode, from);

          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
        fill_value = const0_rtx;
        fill_value = emit_store_flag (gen_reg_rtx (word_mode),
                                      LT, lowfrom, const0_rtx,

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
      insns = get_insns ();

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
          && ! MEM_VOLATILE_P (from)
          && direct_load[(int) to_mode]
          && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
          && ! MEM_VOLATILE_P (from)
          && direct_load[(int) to_mode]
          && ! mode_dependent_address_p (XEXP (from, 0)))
          || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          emit_unop_insn (code, to, from, equiv_code);

          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
          emit_move_insn (to, tmp);
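          /* For instance (hypothetical modes): widening QImode to SImode
             this way shifts the value left by 24 bits and back right by
             24 bits; the kind of right shift selected from UNSIGNEDP (in
             the elided shift arguments) supplies the sign or zero
             extension.  */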
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
  return convert_modes (mode, VOIDmode, x, unsignedp);
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
      double_int val = uhwi_to_double_int (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
        val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
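      /* For example (assuming a 64-bit HOST_WIDE_INT): interpreting the
         SImode constant -1 as unsigned in a TImode destination must
         produce 0xffffffff, not a TImode -1, which is why only the low
         GET_MODE_BITSIZE (oldmode) bits survive the zero extension
         above.  */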
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ??? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);

      return gen_lowpart (mode, x);
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
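/* For instance, with an 8-byte HOST_WIDE_INT, immediates of up to
   16 bytes (two HOST_WIDE_INTs) can be represented, so STORE_MAX_PIECES
   never exceeds 16 even if MOVE_MAX_PIECES is larger.  */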
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
  return MOVE_BY_PIECES_P (len, align);
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode, from_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.from_addr = from_addr;
      to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
      to_addr = XEXP (to, 0);
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);

      to_addr_mode = VOIDmode;
#ifdef STACK_GROWS_DOWNWARD
  data.to_addr = to_addr;
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr, len));
          data.explicit_inc_to = -1;
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

      gcc_assert (!data.reverse);
          if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
            emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
            data.to_addr = copy_to_mode_reg (to_addr_mode,
                                             plus_constant (data.to_addr,
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
        to1 = adjust_address (data.to, QImode, data.offset);
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
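/* As a worked example on a hypothetical 32-bit target: with L == 7 and
   no alignment restriction, the greedy loop below counts one SImode
   move, one HImode move and one QImode move, returning 3.  */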
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));

  while (max_size > 1)
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces_d *data)
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
        to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

        emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
        emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size)
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)

    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size))
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
  return emit_block_move_hints (x, y, size, method, 0, -1);
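/* The 0 and -1 passed for EXPECTED_ALIGN and EXPECTED_SIZE say, in
   effect, that no profile-derived hints about the copy are available.  */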
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

block_move_libcall_safe_for_call_parm (void)
#if defined (REG_PARM_STACK_SPACE)

  /* If arguments are pushed on the stack, then they're safe.  */

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
    CUMULATIVE_ARGS args_so_far;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
        if (!tmp || !REG_P (tmp))
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
        targetm.calls.function_arg_advance (&args_so_far, mode,
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align, HOST_WIDE_INT expected_size)
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum insn_code code = direct_optab_handler (movmem_optab, mode);
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
          rtx last = get_last_insn ();

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          if (insn_data[(int) code].n_operands == 4)
            pat = GEN_FCN ((int) code) (x, y, op2, opalign);
            pat = GEN_FCN ((int) code) (x, y, op2, opalign,
                                        GEN_INT (expected_align
                                        GEN_INT (expected_size));

              volatile_ok = save_volatile_ok;
            delete_insns_since (last);

  volatile_ok = save_volatile_ok;
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

init_block_move_fn (const char *asmspec)
      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

    set_user_assembler_name (block_move_fn, asmspec);

emit_block_move_libcall_fn (int for_call)
  static bool emitted_extern;

    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);

  return block_move_fn;
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
  enum machine_mode y_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);

    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
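/* The RTL emitted above has the shape:

       iter = 0;  goto cmp;
     top:  x[iter] = y[iter];  iter = iter + 1;
     cmp:  if (iter < size) goto top;

   copying one QImode unit per iteration.  */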
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_from_reg (int regno, rtx x, int nregs)
  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
        delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

gen_group_rtx (rtx orig)
  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  for (; i < length; i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */

      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
      else if (GET_CODE (src) == CONCAT)
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
          int slen = GET_MODE_SIZE (GET_MODE (src));

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
      else if (REG_P (src) && GET_MODE (src) == mode
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,

        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

emit_group_load (rtx dst, rtx src, tree type, int ssize)
  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      emit_move_insn (d, tmps[i]);
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

emit_group_move (rtx dst, rtx src)
  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
/* Move a group of registers represented by a PARALLEL into pseudos.  */

emit_group_move_into_temps (rtx src)
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));

  for (i = 0; i < XVECLEN (src, 0); i++)
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */

  if (GET_CODE (dst) == PARALLEL)
      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
              temp = simplify_gen_subreg (outer, tmps[start],
                  emit_move_insn (dst, temp);

      /* If the first element wasn't the low part, try the last.  */
          && start < finish - 1)
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                  emit_move_insn (dst, temp);

      /* Otherwise, simply initialize the result to zero.  */
        emit_move_insn (dst, CONST0_RTX (outer));
  /* Process the pieces.  */
  for (i = start; i < finish; i++)
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);

              enum machine_mode dest_mode = GET_MODE (dest);
              enum machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode),
                  emit_move_insn (adjust_address (dest,
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode),
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
          bytelen = adj_bytelen;

      /* Optimize the access just a bit.  */
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
2082 /* Generate code to copy a BLKmode object of TYPE out of a
2083 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2084 is null, a stack temporary is created. TGTBLK is returned.
2086 The purpose of this routine is to handle functions that return
2087 BLKmode structures in registers. Some machines (the PA for example)
2088 want to return all small structures in registers regardless of the
2089 structure's alignment. */
2092 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2094 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2095 rtx src = NULL, dst = NULL;
2096 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2097 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2098 enum machine_mode copy_mode;
2102 tgtblk = assign_temp (build_qualified_type (type,
2104 | TYPE_QUAL_CONST)),
2106 preserve_temp_slots (tgtblk);
2109 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2110 into a new pseudo which is a full word. */
2112 if (GET_MODE (srcreg) != BLKmode
2113 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2114 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2116 /* If the structure doesn't take up a whole number of words, see whether
2117 SRCREG is padded on the left or on the right. If it's on the left,
2118 set PADDING_CORRECTION to the number of bits to skip.
2120 In most ABIs, the structure will be returned at the least significant end of
2121 the register, which translates to right padding on little-endian
2122 targets and left padding on big-endian targets. The opposite
2123 holds if the structure is returned at the most significant
2124 end of the register. */
2125 if (bytes % UNITS_PER_WORD != 0
2126 && (targetm.calls.return_in_msb (type)
2128 : BYTES_BIG_ENDIAN))
2130 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
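/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: it redoes the PADDING_CORRECTION arithmetic above in plain C
   for a hypothetical 32-bit-word target, with the big-endian/MSB-return
   decision collapsed into a single flag.  All sk_* names are invented
   for the example; it is kept under "#if 0" so it is never compiled.  */
#if 0
#include <assert.h>

#define SK_UNITS_PER_WORD 4
#define SK_BITS_PER_UNIT 8
#define SK_BITS_PER_WORD (SK_UNITS_PER_WORD * SK_BITS_PER_UNIT)

/* Bits to skip at the start of the copy when a BYTES-byte structure
   sits at the most significant end of its last register word.  */
static unsigned
sk_padding_correction (unsigned bytes, int value_at_msb_end)
{
  if (bytes % SK_UNITS_PER_WORD != 0 && value_at_msb_end)
    return SK_BITS_PER_WORD - (bytes % SK_UNITS_PER_WORD) * SK_BITS_PER_UNIT;
  return 0;
}

int
main (void)
{
  /* A 6-byte struct: only 16 bits of the last 32-bit word are useful,
     so 16 bits of left padding must be skipped.  */
  assert (sk_padding_correction (6, 1) == 16);
  /* A whole number of words needs no correction.  */
  assert (sk_padding_correction (8, 1) == 0);
  return 0;
}
#endif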
2132 /* Copy the structure BITSIZE bits at a time. If the target lives in
2133 memory, take care of not reading/writing past its end by selecting
2134 a copy mode suited to BITSIZE. This should always be possible given
2137 We could probably emit more efficient code for machines which do not use
2138 strict alignment, but it doesn't seem worth the effort at the current time. */
2141 copy_mode = word_mode;
2144 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2145 if (mem_mode != BLKmode)
2146 copy_mode = mem_mode;
2149 for (bitpos = 0, xbitpos = padding_correction;
2150 bitpos < bytes * BITS_PER_UNIT;
2151 bitpos += bitsize, xbitpos += bitsize)
2153 /* We need a new source operand each time xbitpos is on a
2154 word boundary and when xbitpos == padding_correction
2155 (the first time through). */
2156 if (xbitpos % BITS_PER_WORD == 0
2157 || xbitpos == padding_correction)
2158 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2161 /* We need a new destination operand each time bitpos is on a word boundary. */
2163 if (bitpos % BITS_PER_WORD == 0)
2164 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2166 /* Use xbitpos for the source extraction (right justified) and
2167 bitpos for the destination store (left justified). */
2168 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2169 extract_bit_field (src, bitsize,
2170 xbitpos % BITS_PER_WORD, 1,
2171 NULL_RTX, copy_mode, copy_mode));
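/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: it mimics the dual-cursor copy loop above on plain byte
   buffers.  XBITPOS walks the source, skewed by the padding correction;
   BITPOS walks the destination from zero.  The real loop moves BITSIZE
   bits per iteration via extract/store_bit_field; this sketch goes bit
   by bit for clarity and assumes little-endian bit numbering within a
   byte.  All sk_* names are invented.  */
#if 0
#include <assert.h>

static int
sk_get_bit (const unsigned char *buf, unsigned pos)
{
  return (buf[pos / 8] >> (pos % 8)) & 1;
}

static void
sk_set_bit (unsigned char *buf, unsigned pos, int bit)
{
  if (bit)
    buf[pos / 8] |= 1u << (pos % 8);
  else
    buf[pos / 8] &= ~(1u << (pos % 8));
}

static void
sk_copy_bits (unsigned char *dst, const unsigned char *src,
              unsigned nbits, unsigned padding_correction)
{
  unsigned bitpos, xbitpos;
  /* Two cursors, as in the loop above: the source cursor starts past
     the padding, the destination cursor starts at zero.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < nbits;
       bitpos++, xbitpos++)
    sk_set_bit (dst, bitpos, sk_get_bit (src, xbitpos));
}

int
main (void)
{
  unsigned char src[4] = { 0, 0, 0xAB, 0xCD };	/* data in the high half */
  unsigned char dst[4] = { 0, 0, 0, 0 };
  sk_copy_bits (dst, src, 16, 16);	/* skip 16 bits of left padding */
  assert (dst[0] == 0xAB && dst[1] == 0xCD);
  return 0;
}
#endif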
2177 /* Add a USE expression for REG to the (possibly empty) list pointed
2178 to by CALL_FUSAGE. REG must denote a hard register. */
2181 use_reg (rtx *call_fusage, rtx reg)
2183 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2186 = gen_rtx_EXPR_LIST (VOIDmode,
2187 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2190 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2191 starting at REGNO. All of these registers must be hard registers. */
2194 use_regs (rtx *call_fusage, int regno, int nregs)
2198 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2200 for (i = 0; i < nregs; i++)
2201 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2204 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2205 PARALLEL REGS. This is for calls that pass values in multiple
2206 non-contiguous locations. The Irix 6 ABI has examples of this. */
2209 use_group_regs (rtx *call_fusage, rtx regs)
2213 for (i = 0; i < XVECLEN (regs, 0); i++)
2215 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2217 /* A NULL entry means the parameter goes both on the stack and in
2218 registers. This can also be a MEM for targets that pass values
2219 partially on the stack and partially in registers. */
2220 if (reg != 0 && REG_P (reg))
2221 use_reg (call_fusage, reg);
2225 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2226 assignment and the code of the expression on the RHS is CODE. Return
2230 get_def_for_expr (tree name, enum tree_code code)
2234 if (TREE_CODE (name) != SSA_NAME)
2237 def_stmt = get_gimple_for_ssa_name (name);
2239 || gimple_assign_rhs_code (def_stmt) != code)
2246 /* Determine whether the LEN bytes generated by CONSTFUN can be
2247 stored to memory using several move instructions. CONSTFUNDATA is
2248 a pointer which will be passed as argument in every CONSTFUN call.
2249 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2250 a memset operation and false if it's a copy of a constant string.
2251 Return nonzero if a call to store_by_pieces should succeed. */
2254 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2255 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2256 void *constfundata, unsigned int align, bool memsetp)
2258 unsigned HOST_WIDE_INT l;
2259 unsigned int max_size;
2260 HOST_WIDE_INT offset = 0;
2261 enum machine_mode mode, tmode;
2262 enum insn_code icode;
2270 ? SET_BY_PIECES_P (len, align)
2271 : STORE_BY_PIECES_P (len, align)))
2274 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2275 if (align >= GET_MODE_ALIGNMENT (tmode))
2276 align = GET_MODE_ALIGNMENT (tmode);
2279 enum machine_mode xmode;
2281 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2283 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2284 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2285 || SLOW_UNALIGNED_ACCESS (tmode, align))
2288 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2291 /* We would first store what we can in the largest integer mode, then go to
2292 successively smaller modes. */
2295 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2300 max_size = STORE_MAX_PIECES + 1;
2301 while (max_size > 1)
2303 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2304 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2305 if (GET_MODE_SIZE (tmode) < max_size)
2308 if (mode == VOIDmode)
2311 icode = optab_handler (mov_optab, mode);
2312 if (icode != CODE_FOR_nothing
2313 && align >= GET_MODE_ALIGNMENT (mode))
2315 unsigned int size = GET_MODE_SIZE (mode);
2322 cst = (*constfun) (constfundata, offset, mode);
2323 if (!LEGITIMATE_CONSTANT_P (cst))
2333 max_size = GET_MODE_SIZE (mode);
2336 /* The code above should have handled everything. */
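/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: it shows the shape of the widest-first greedy walk above,
   counting how many power-of-two-sized stores cover LEN bytes when the
   widest usable piece is MAX_PIECE bytes.  Alignment and the per-piece
   legitimacy checks are ignored, and MAX_PIECE is assumed to be a power
   of two.  All sk_* names are invented.  */
#if 0
#include <assert.h>

static unsigned
sk_count_pieces (unsigned long len, unsigned max_piece)
{
  unsigned count = 0;
  unsigned size;
  /* Widest piece first, then successively smaller ones, exactly the
     shape of the mode loop above.  */
  for (size = max_piece; size >= 1; size /= 2)
    while (len >= size)
      {
	len -= size;
	count++;
      }
  return count;
}

int
main (void)
{
  assert (sk_count_pieces (7, 4) == 3);	  /* 4 + 2 + 1 */
  assert (sk_count_pieces (16, 8) == 2);  /* 8 + 8 */
  return 0;
}
#endif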
2343 /* Generate several move instructions to store LEN bytes generated by
2344 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2345 pointer which will be passed as argument in every CONSTFUN call.
2346 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2347 a memset operation and false if it's a copy of a constant string.
2348 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2349 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2353 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2354 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2355 void *constfundata, unsigned int align, bool memsetp, int endp)
2357 enum machine_mode to_addr_mode
2358 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2359 struct store_by_pieces_d data;
2363 gcc_assert (endp != 2);
2368 ? SET_BY_PIECES_P (len, align)
2369 : STORE_BY_PIECES_P (len, align));
2370 data.constfun = constfun;
2371 data.constfundata = constfundata;
2374 store_by_pieces_1 (&data, align);
2379 gcc_assert (!data.reverse);
2384 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2385 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2387 data.to_addr = copy_to_mode_reg (to_addr_mode,
2388 plus_constant (data.to_addr,
2391 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2398 to1 = adjust_address (data.to, QImode, data.offset);
2406 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2407 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2410 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2412 struct store_by_pieces_d data;
2417 data.constfun = clear_by_pieces_1;
2418 data.constfundata = NULL;
2421 store_by_pieces_1 (&data, align);
2424 /* Callback routine for clear_by_pieces.
2425 Return const0_rtx unconditionally. */
2428 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2429 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2430 enum machine_mode mode ATTRIBUTE_UNUSED)
2435 /* Subroutine of clear_by_pieces and store_by_pieces.
2436 Generate several move instructions to store LEN bytes of block TO. (A MEM
2437 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2440 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2441 unsigned int align ATTRIBUTE_UNUSED)
2443 enum machine_mode to_addr_mode
2444 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2445 rtx to_addr = XEXP (data->to, 0);
2446 unsigned int max_size = STORE_MAX_PIECES + 1;
2447 enum machine_mode mode = VOIDmode, tmode;
2448 enum insn_code icode;
2451 data->to_addr = to_addr;
2453 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2454 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2456 data->explicit_inc_to = 0;
2458 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2460 data->offset = data->len;
2462 /* If storing requires more than two move insns,
2463 copy addresses to registers (to make displacements shorter)
2464 and use post-increment if available. */
2465 if (!data->autinc_to
2466 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2468 /* Determine the main mode we'll be using. */
2469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2471 if (GET_MODE_SIZE (tmode) < max_size)
2474 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2476 data->to_addr = copy_to_mode_reg (to_addr_mode,
2477 plus_constant (to_addr, data->len));
2478 data->autinc_to = 1;
2479 data->explicit_inc_to = -1;
2482 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2483 && ! data->autinc_to)
2485 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2486 data->autinc_to = 1;
2487 data->explicit_inc_to = 1;
2490 if (!data->autinc_to && CONSTANT_P (to_addr))
2491 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2494 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2495 if (align >= GET_MODE_ALIGNMENT (tmode))
2496 align = GET_MODE_ALIGNMENT (tmode);
2499 enum machine_mode xmode;
2501 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2503 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2504 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2505 || SLOW_UNALIGNED_ACCESS (tmode, align))
2508 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2511 /* First store what we can in the largest integer mode, then go to
2512 successively smaller modes. */
2514 while (max_size > 1)
2516 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2517 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2518 if (GET_MODE_SIZE (tmode) < max_size)
2521 if (mode == VOIDmode)
2524 icode = optab_handler (mov_optab, mode);
2525 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2526 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2528 max_size = GET_MODE_SIZE (mode);
2531 /* The code above should have handled everything. */
2532 gcc_assert (!data->len);
2535 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2536 with move instructions for mode MODE. GENFUN is the gen_... function
2537 to make a move insn for that mode. DATA has all the other info. */
2540 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2541 struct store_by_pieces_d *data)
2543 unsigned int size = GET_MODE_SIZE (mode);
2546 while (data->len >= size)
2549 data->offset -= size;
2551 if (data->autinc_to)
2552 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2555 to1 = adjust_address (data->to, mode, data->offset);
2557 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2558 emit_insn (gen_add2_insn (data->to_addr,
2559 GEN_INT (-(HOST_WIDE_INT) size)));
2561 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2562 emit_insn ((*genfun) (to1, cst));
2564 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2565 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2567 if (! data->reverse)
2568 data->offset += size;
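/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: it spells out the store sequence the two loops above would
   produce for a 7-byte clear on a hypothetical 32-bit target -- one
   4-byte, one 2-byte and one 1-byte store, forward order, with the
   offset accumulating.  All sk_* names are invented.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

static void
sk_clear_7 (unsigned char *p)
{
  uint32_t z4 = 0;
  uint16_t z2 = 0;
  uint8_t z1 = 0;
  memcpy (p + 0, &z4, 4);	/* SImode piece */
  memcpy (p + 4, &z2, 2);	/* HImode piece */
  memcpy (p + 6, &z1, 1);	/* QImode piece */
}

int
main (void)
{
  unsigned char buf[7] = { 1, 2, 3, 4, 5, 6, 7 };
  int i;
  sk_clear_7 (buf);
  for (i = 0; i < 7; i++)
    assert (buf[i] == 0);
  return 0;
}
#endif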
2574 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2575 its length in bytes. */
2578 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2579 unsigned int expected_align, HOST_WIDE_INT expected_size)
2581 enum machine_mode mode = GET_MODE (object);
2584 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2586 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2587 just move a zero. Otherwise, do this a piece at a time. */
2589 && CONST_INT_P (size)
2590 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2592 rtx zero = CONST0_RTX (mode);
2595 emit_move_insn (object, zero);
2599 if (COMPLEX_MODE_P (mode))
2601 zero = CONST0_RTX (GET_MODE_INNER (mode));
2604 write_complex_part (object, zero, 0);
2605 write_complex_part (object, zero, 1);
2611 if (size == const0_rtx)
2614 align = MEM_ALIGN (object);
2616 if (CONST_INT_P (size)
2617 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2618 clear_by_pieces (object, INTVAL (size), align);
2619 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2620 expected_align, expected_size))
2622 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2623 return set_storage_via_libcall (object, size, const0_rtx,
2624 method == BLOCK_OP_TAILCALL);
2632 clear_storage (rtx object, rtx size, enum block_op_methods method)
2634 return clear_storage_hints (object, size, method, 0, -1);
2638 /* A subroutine of clear_storage. Expand a call to memset.
2639 Return the return value of memset, 0 otherwise. */
2642 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2644 tree call_expr, fn, object_tree, size_tree, val_tree;
2645 enum machine_mode size_mode;
2648 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2649 place those pseudos into a VAR_DECL and use them later. */
2651 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2653 size_mode = TYPE_MODE (sizetype);
2654 size = convert_to_mode (size_mode, size, 1);
2655 size = copy_to_mode_reg (size_mode, size);
2657 /* It is incorrect to use the libcall calling conventions to call
2658 memset in this context. This could be a user call to memset and
2659 the user may wish to examine the return value from memset. For
2660 targets where libcalls and normal calls have different conventions
2661 for returning pointers, we could end up generating incorrect code. */
2663 object_tree = make_tree (ptr_type_node, object);
2664 if (!CONST_INT_P (val))
2665 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2666 size_tree = make_tree (sizetype, size);
2667 val_tree = make_tree (integer_type_node, val);
2669 fn = clear_storage_libcall_fn (true);
2670 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2671 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2673 retval = expand_normal (call_expr);
2678 /* A subroutine of set_storage_via_libcall. Create the tree node
2679 for the function we use for block clears. The first time FOR_CALL
2680 is true, we call assemble_external. */
2682 tree block_clear_fn;
2685 init_block_clear_fn (const char *asmspec)
2687 if (!block_clear_fn)
2691 fn = get_identifier ("memset");
2692 args = build_function_type_list (ptr_type_node, ptr_type_node,
2693 integer_type_node, sizetype,
2696 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2697 DECL_EXTERNAL (fn) = 1;
2698 TREE_PUBLIC (fn) = 1;
2699 DECL_ARTIFICIAL (fn) = 1;
2700 TREE_NOTHROW (fn) = 1;
2701 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2702 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2704 block_clear_fn = fn;
2708 set_user_assembler_name (block_clear_fn, asmspec);
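/* For reference (not GCC code): the declaration built above corresponds
   to the standard C prototype, with sizetype standing in for size_t:

     void *memset (void *s, int c, size_t n);  */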
2712 clear_storage_libcall_fn (int for_call)
2714 static bool emitted_extern;
2716 if (!block_clear_fn)
2717 init_block_clear_fn (NULL);
2719 if (for_call && !emitted_extern)
2721 emitted_extern = true;
2722 make_decl_rtl (block_clear_fn);
2723 assemble_external (block_clear_fn);
2726 return block_clear_fn;
2729 /* Expand a setmem pattern; return true if successful. */
2732 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2733 unsigned int expected_align, HOST_WIDE_INT expected_size)
2735 /* Try the most limited insn first, because there's no point
2736 including more than one in the machine description unless
2737 the more limited one has some advantage. */
2739 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2740 enum machine_mode mode;
2742 if (expected_align < align)
2743 expected_align = align;
2745 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2746 mode = GET_MODE_WIDER_MODE (mode))
2748 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2749 insn_operand_predicate_fn pred;
2751 if (code != CODE_FOR_nothing
2752 /* We don't need MODE to be narrower than
2753 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2754 the mode mask, as it is returned by the macro, it will
2755 definitely be less than the actual mode mask. */
2756 && ((CONST_INT_P (size)
2757 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2758 <= (GET_MODE_MASK (mode) >> 1)))
2759 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2760 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2761 || (*pred) (object, BLKmode))
2762 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2763 || (*pred) (opalign, VOIDmode)))
2766 enum machine_mode char_mode;
2767 rtx last = get_last_insn ();
2770 opsize = convert_to_mode (mode, size, 1);
2771 pred = insn_data[(int) code].operand[1].predicate;
2772 if (pred != 0 && ! (*pred) (opsize, mode))
2773 opsize = copy_to_mode_reg (mode, opsize);
2776 char_mode = insn_data[(int) code].operand[2].mode;
2777 if (char_mode != VOIDmode)
2779 opchar = convert_to_mode (char_mode, opchar, 1);
2780 pred = insn_data[(int) code].operand[2].predicate;
2781 if (pred != 0 && ! (*pred) (opchar, char_mode))
2782 opchar = copy_to_mode_reg (char_mode, opchar);
2785 if (insn_data[(int) code].n_operands == 4)
2786 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2788 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2789 GEN_INT (expected_align
2791 GEN_INT (expected_size));
2798 delete_insns_since (last);
2806 /* Write to one of the components of the complex value CPLX. Write VAL to
2807 the real part if IMAG_P is false, and the imaginary part if it's true. */
2810 write_complex_part (rtx cplx, rtx val, bool imag_p)
2812 enum machine_mode cmode;
2813 enum machine_mode imode;
2816 if (GET_CODE (cplx) == CONCAT)
2818 emit_move_insn (XEXP (cplx, imag_p), val);
2822 cmode = GET_MODE (cplx);
2823 imode = GET_MODE_INNER (cmode);
2824 ibitsize = GET_MODE_BITSIZE (imode);
2826 /* For MEMs simplify_gen_subreg may generate an invalid new address
2827 because, e.g., the original address is considered mode-dependent
2828 by the target, which restricts simplify_subreg from invoking
2829 adjust_address_nv. Instead of preparing fallback support for an
2830 invalid address, we call adjust_address_nv directly. */
2833 emit_move_insn (adjust_address_nv (cplx, imode,
2834 imag_p ? GET_MODE_SIZE (imode) : 0),
2839 /* If the sub-object is at least word sized, then we know that subregging
2840 will work. This special case is important, since store_bit_field
2841 wants to operate on integer modes, and there's rarely an OImode to
2842 correspond to TCmode. */
2843 if (ibitsize >= BITS_PER_WORD
2844 /* For hard regs we have exact predicates. Assume we can split
2845 the original object if it spans an even number of hard regs.
2846 This special case is important for SCmode on 64-bit platforms
2847 where the natural size of floating-point regs is 32-bit. */
2849 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2850 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2852 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2853 imag_p ? GET_MODE_SIZE (imode) : 0);
2856 emit_move_insn (part, val);
2860 /* simplify_gen_subreg may fail for sub-word MEMs. */
2861 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2864 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
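/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: it shows the memory layout the offsets above rely on -- the
   real part first, then the imaginary part, each of the inner mode's
   size, independent of endianness.  All sk_* names are invented.  */
#if 0
#include <assert.h>
#include <string.h>

struct sk_complex_double { double re, im; };

static void
sk_write_part (struct sk_complex_double *c, double val, int imag_p)
{
  /* Byte offset of the selected part: 0 for the real part, the inner
     mode's size -- sizeof (double) here -- for the imaginary part.  */
  size_t off = imag_p ? sizeof (double) : 0;
  memcpy ((char *) c + off, &val, sizeof (double));
}

int
main (void)
{
  struct sk_complex_double c = { 0.0, 0.0 };
  sk_write_part (&c, 3.5, 1);
  assert (c.im == 3.5 && c.re == 0.0);
  return 0;
}
#endif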
2867 /* Extract one of the components of the complex value CPLX. Extract the
2868 real part if IMAG_P is false, and the imaginary part if it's true. */
2871 read_complex_part (rtx cplx, bool imag_p)
2873 enum machine_mode cmode, imode;
2876 if (GET_CODE (cplx) == CONCAT)
2877 return XEXP (cplx, imag_p);
2879 cmode = GET_MODE (cplx);
2880 imode = GET_MODE_INNER (cmode);
2881 ibitsize = GET_MODE_BITSIZE (imode);
2883 /* Special case reads from complex constants that got spilled to memory. */
2884 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2886 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2887 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2889 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2890 if (CONSTANT_CLASS_P (part))
2891 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2895 /* For MEMs simplify_gen_subreg may generate an invalid new address
2896 because, e.g., the original address is considered mode-dependent
2897 by the target, which restricts simplify_subreg from invoking
2898 adjust_address_nv. Instead of preparing fallback support for an
2899 invalid address, we call adjust_address_nv directly. */
2901 return adjust_address_nv (cplx, imode,
2902 imag_p ? GET_MODE_SIZE (imode) : 0);
2904 /* If the sub-object is at least word sized, then we know that subregging
2905 will work. This special case is important, since extract_bit_field
2906 wants to operate on integer modes, and there's rarely an OImode to
2907 correspond to TCmode. */
2908 if (ibitsize >= BITS_PER_WORD
2909 /* For hard regs we have exact predicates. Assume we can split
2910 the original object if it spans an even number of hard regs.
2911 This special case is important for SCmode on 64-bit platforms
2912 where the natural size of floating-point regs is 32-bit. */
2914 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2915 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2917 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2918 imag_p ? GET_MODE_SIZE (imode) : 0);
2922 /* simplify_gen_subreg may fail for sub-word MEMs. */
2923 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2926 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2927 true, NULL_RTX, imode, imode);
2930 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2931 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2932 represented in NEW_MODE. If FORCE is true, this will never happen, as
2933 we'll force-create a SUBREG if needed. */
2936 emit_move_change_mode (enum machine_mode new_mode,
2937 enum machine_mode old_mode, rtx x, bool force)
2941 if (push_operand (x, GET_MODE (x)))
2943 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2944 MEM_COPY_ATTRIBUTES (ret, x);
2948 /* We don't have to worry about changing the address since the
2949 size in bytes is supposed to be the same. */
2950 if (reload_in_progress)
2952 /* Copy the MEM to change the mode and move any
2953 substitutions from the old MEM to the new one. */
2954 ret = adjust_address_nv (x, new_mode, 0);
2955 copy_replacements (x, ret);
2958 ret = adjust_address (x, new_mode, 0);
2962 /* Note that we do want simplify_subreg's behavior of validating
2963 that the new mode is ok for a hard register. If we were to use
2964 simplify_gen_subreg, we would create the subreg, but would
2965 probably run into the target not being able to implement it. */
2966 /* Except, of course, when FORCE is true, when this is exactly what
2967 we want, which is needed for CCmodes on some targets. */
2969 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2971 ret = simplify_subreg (new_mode, x, old_mode, 0);
2977 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2978 an integer mode of the same size as MODE. Returns the instruction
2979 emitted, or NULL if such a move could not be generated. */
2982 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2984 enum machine_mode imode;
2985 enum insn_code code;
2987 /* There must exist a mode of the exact size we require. */
2988 imode = int_mode_for_mode (mode);
2989 if (imode == BLKmode)
2992 /* The target must support moves in this mode. */
2993 code = optab_handler (mov_optab, imode);
2994 if (code == CODE_FOR_nothing)
2997 x = emit_move_change_mode (imode, mode, x, force);
3000 y = emit_move_change_mode (imode, mode, y, force);
3003 return emit_insn (GEN_FCN (code) (x, y));
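/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: it shows what "moving via an integer mode of the same size"
   means at the bit level -- an SFmode value travelling through SImode,
   modelled here with memcpy through a uint32_t.  It assumes a 32-bit
   float, which holds on essentially all supported targets.  All sk_*
   names are invented.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

static void
sk_move_float_via_int (float *dst, const float *src)
{
  uint32_t bits;
  /* The modes have the same size, so the bit pattern is preserved.  */
  memcpy (&bits, src, sizeof bits);	/* float -> integer "mode" */
  memcpy (dst, &bits, sizeof bits);	/* integer "mode" -> float */
}

int
main (void)
{
  float x = 1.25f, y = 0.0f;
  sk_move_float_via_int (&y, &x);
  assert (y == 1.25f);
  return 0;
}
#endif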
3006 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3007 Return an equivalent MEM that does not use an auto-increment. */
3010 emit_move_resolve_push (enum machine_mode mode, rtx x)
3012 enum rtx_code code = GET_CODE (XEXP (x, 0));
3013 HOST_WIDE_INT adjust;
3016 adjust = GET_MODE_SIZE (mode);
3017 #ifdef PUSH_ROUNDING
3018 adjust = PUSH_ROUNDING (adjust);
3020 if (code == PRE_DEC || code == POST_DEC)
3022 else if (code == PRE_MODIFY || code == POST_MODIFY)
3024 rtx expr = XEXP (XEXP (x, 0), 1);
3027 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3028 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3029 val = INTVAL (XEXP (expr, 1));
3030 if (GET_CODE (expr) == MINUS)
3032 gcc_assert (adjust == val || adjust == -val);
3036 /* Do not use anti_adjust_stack, since we don't want to update
3037 stack_pointer_delta. */
3038 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3039 GEN_INT (adjust), stack_pointer_rtx,
3040 0, OPTAB_LIB_WIDEN);
3041 if (temp != stack_pointer_rtx)
3042 emit_move_insn (stack_pointer_rtx, temp);
3049 temp = stack_pointer_rtx;
3054 temp = plus_constant (stack_pointer_rtx, -adjust);
3060 return replace_equiv_address (x, temp);
3063 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3064 X is known to satisfy push_operand, and MODE is known to be complex.
3065 Returns the last instruction emitted. */
3068 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3070 enum machine_mode submode = GET_MODE_INNER (mode);
3073 #ifdef PUSH_ROUNDING
3074 unsigned int submodesize = GET_MODE_SIZE (submode);
3076 /* In case we output to the stack, but the size is smaller than the
3077 machine can push exactly, we need to use move instructions. */
3078 if (PUSH_ROUNDING (submodesize) != submodesize)
3080 x = emit_move_resolve_push (mode, x);
3081 return emit_move_insn (x, y);
3085 /* Note that the real part always precedes the imag part in memory
3086 regardless of machine's endianness. */
3087 switch (GET_CODE (XEXP (x, 0)))
3101 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3102 read_complex_part (y, imag_first));
3103 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3104 read_complex_part (y, !imag_first));
3107 /* A subroutine of emit_move_complex. Perform the move from Y to X
3108 via two moves of the parts. Returns the last instruction emitted. */
3111 emit_move_complex_parts (rtx x, rtx y)
3113 /* Show the output dies here. This is necessary for SUBREGs
3114 of pseudos since we cannot track their lifetimes correctly;
3115 hard regs shouldn't appear here except as return values. */
3116 if (!reload_completed && !reload_in_progress
3117 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3120 write_complex_part (x, read_complex_part (y, false), false);
3121 write_complex_part (x, read_complex_part (y, true), true);
3123 return get_last_insn ();
3126 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3127 MODE is known to be complex. Returns the last instruction emitted. */
3130 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3134 /* Need to take special care for pushes, to maintain proper ordering
3135 of the data, and possibly extra padding. */
3136 if (push_operand (x, mode))
3137 return emit_move_complex_push (mode, x, y);
3139 /* See if we can coerce the target into moving both values at once. */
3141 /* Move floating point as parts. */
3142 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3143 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3145 /* Not possible if the values are inherently not adjacent. */
3146 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3148 /* Is possible if both are registers (or subregs of registers). */
3149 else if (register_operand (x, mode) && register_operand (y, mode))
3151 /* If one of the operands is a memory, and alignment constraints
3152 are friendly enough, we may be able to do combined memory operations.
3153 We do not attempt this if Y is a constant because that combination is
3154 usually better with the by-parts thing below. */
3155 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3156 && (!STRICT_ALIGNMENT
3157 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3166 /* For memory to memory moves, optimal behavior can be had with the
3167 existing block move logic. */
3168 if (MEM_P (x) && MEM_P (y))
3170 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3171 BLOCK_OP_NO_LIBCALL);
3172 return get_last_insn ();
3175 ret = emit_move_via_integer (mode, x, y, true);
3180 return emit_move_complex_parts (x, y);
3183 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3184 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3187 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3191 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3194 enum insn_code code = optab_handler (mov_optab, CCmode);
3195 if (code != CODE_FOR_nothing)
3197 x = emit_move_change_mode (CCmode, mode, x, true);
3198 y = emit_move_change_mode (CCmode, mode, y, true);
3199 return emit_insn (GEN_FCN (code) (x, y));
3203 /* Otherwise, find the MODE_INT mode of the same width. */
3204 ret = emit_move_via_integer (mode, x, y, false);
3205 gcc_assert (ret != NULL);
3209 /* Return true if word I of OP lies entirely in the
3210 undefined bits of a paradoxical subreg. */
3213 undefined_operand_subword_p (const_rtx op, int i)
3215 enum machine_mode innermode, innermostmode;
3217 if (GET_CODE (op) != SUBREG)
3219 innermode = GET_MODE (op);
3220 innermostmode = GET_MODE (SUBREG_REG (op));
3221 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3222 /* The SUBREG_BYTE represents offset, as if the value were stored in
3223 memory, except for a paradoxical subreg where we define
3224 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3226 if (SUBREG_BYTE (op) == 0
3227 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3229 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3230 if (WORDS_BIG_ENDIAN)
3231 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3232 if (BYTES_BIG_ENDIAN)
3233 offset += difference % UNITS_PER_WORD;
3235 if (offset >= GET_MODE_SIZE (innermostmode)
3236 || offset <= -GET_MODE_SIZE (word_mode))
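/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: it replays the offset test above for a little-endian target
   with 4-byte words, omitting the big-endian corrections.  A paradoxical
   subreg reads an 8-byte value out of a 4-byte inner register; word 1
   then lies entirely in the undefined upper half.  All sk_* names are
   invented.  */
#if 0
#include <assert.h>

#define SK_UNITS_PER_WORD 4

static int
sk_undefined_word_p (int i, int inner_size /* bytes */, int subreg_byte)
{
  int offset = i * SK_UNITS_PER_WORD + subreg_byte;
  /* Word I is undefined if it starts past the inner value, or wholly
     before it.  */
  return offset >= inner_size || offset <= -SK_UNITS_PER_WORD;
}

int
main (void)
{
  /* A DImode subreg of an SImode register: word 0 is the real value,
     word 1 is undefined padding.  */
  assert (!sk_undefined_word_p (0, 4, 0));
  assert (sk_undefined_word_p (1, 4, 0));
  return 0;
}
#endif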
3241 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3242 MODE is any multi-word or full-word mode that lacks a move_insn
3243 pattern. Note that you will get better code if you define such
3244 patterns, even if they must turn into multiple assembler instructions. */
3247 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3254 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3256 /* If X is a push on the stack, do the push now and replace
3257 X with a reference to the stack pointer. */
3258 if (push_operand (x, mode))
3259 x = emit_move_resolve_push (mode, x);
3261 /* If we are in reload, see if either operand is a MEM whose address
3262 is scheduled for replacement. */
3263 if (reload_in_progress && MEM_P (x)
3264 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3265 x = replace_equiv_address_nv (x, inner);
3266 if (reload_in_progress && MEM_P (y)
3267 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3268 y = replace_equiv_address_nv (y, inner);
3272 need_clobber = false;
3274 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3277 rtx xpart = operand_subword (x, i, 1, mode);
3280 /* Do not generate code for a move if it would come entirely
3281 from the undefined bits of a paradoxical subreg. */
3282 if (undefined_operand_subword_p (y, i))
3285 ypart = operand_subword (y, i, 1, mode);
3287 /* If we can't get a part of Y, put Y into memory if it is a
3288 constant. Otherwise, force it into a register. Then we must
3289 be able to get a part of Y. */
3290 if (ypart == 0 && CONSTANT_P (y))
3292 y = use_anchored_address (force_const_mem (mode, y));
3293 ypart = operand_subword (y, i, 1, mode);
3295 else if (ypart == 0)
3296 ypart = operand_subword_force (y, i, mode);
3298 gcc_assert (xpart && ypart);
3300 need_clobber |= (GET_CODE (xpart) == SUBREG);
3302 last_insn = emit_move_insn (xpart, ypart);
3308 /* Show the output dies here. This is necessary for SUBREGs
3309 of pseudos since we cannot track their lifetimes correctly;
3310 hard regs shouldn't appear here except as return values.
3311 We never want to emit such a clobber after reload. */
3313 && ! (reload_in_progress || reload_completed)
3314 && need_clobber != 0)
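/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: a multi-word value copied one word at a time, using the same
   word-count formula as the loop bound above, (size + word - 1) / word.
   The clobber bookkeeping is omitted and the size is assumed to be a
   whole number of words.  All sk_* names are invented.  */
#if 0
#include <assert.h>
#include <string.h>

static void
sk_move_multi_word (unsigned char *dst, const unsigned char *src,
		    unsigned nbytes)
{
  unsigned nwords = (nbytes + sizeof (long) - 1) / sizeof (long);
  unsigned i;
  assert (nbytes % sizeof (long) == 0);
  for (i = 0; i < nwords; i++)
    memcpy (dst + i * sizeof (long), src + i * sizeof (long),
	    sizeof (long));
}

int
main (void)
{
  unsigned char a[16], b[16];
  unsigned i;
  for (i = 0; i < 16; i++)
    {
      a[i] = (unsigned char) (i + 1);
      b[i] = 0;
    }
  sk_move_multi_word (b, a, 16);
  assert (memcmp (a, b, 16) == 0);
  return 0;
}
#endif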
3322 /* Low level part of emit_move_insn.
3323 Called just like emit_move_insn, but assumes X and Y
3324 are basically valid. */
3327 emit_move_insn_1 (rtx x, rtx y)
3329 enum machine_mode mode = GET_MODE (x);
3330 enum insn_code code;
3332 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3334 code = optab_handler (mov_optab, mode);
3335 if (code != CODE_FOR_nothing)
3336 return emit_insn (GEN_FCN (code) (x, y));
3338 /* Expand complex moves by moving real part and imag part. */
3339 if (COMPLEX_MODE_P (mode))
3340 return emit_move_complex (mode, x, y);
3342 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3343 || ALL_FIXED_POINT_MODE_P (mode))
3345 rtx result = emit_move_via_integer (mode, x, y, true);
3347 /* If we can't find an integer mode, use multi words. */
3351 return emit_move_multi_word (mode, x, y);
3354 if (GET_MODE_CLASS (mode) == MODE_CC)
3355 return emit_move_ccmode (mode, x, y);
3357 /* Try using a move pattern for the corresponding integer mode. This is
3358 only safe when simplify_subreg can convert MODE constants into integer
3359 constants. At present, it can only do this reliably if the value
3360 fits within a HOST_WIDE_INT. */
3361 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3363 rtx ret = emit_move_via_integer (mode, x, y, false);
3368 return emit_move_multi_word (mode, x, y);
3371 /* Generate code to copy Y into X.
3372 Both Y and X must have the same mode, except that
3373 Y can be a constant with VOIDmode.
3374 This mode cannot be BLKmode; use emit_block_move for that.
3376 Return the last instruction emitted. */
3379 emit_move_insn (rtx x, rtx y)
3381 enum machine_mode mode = GET_MODE (x);
3382 rtx y_cst = NULL_RTX;
3385 gcc_assert (mode != BLKmode
3386 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3391 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3392 && (last_insn = compress_float_constant (x, y)))
3397 if (!LEGITIMATE_CONSTANT_P (y))
3399 y = force_const_mem (mode, y);
3401 /* If the target's cannot_force_const_mem prevented the spill,
3402 assume that the target's move expanders will also take care
3403 of the non-legitimate constant. */
3407 y = use_anchored_address (y);
3411 /* If X or Y are memory references, verify that their addresses are valid for the machine mode. */
3414 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3416 && ! push_operand (x, GET_MODE (x))))
3417 x = validize_mem (x);
3420 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3421 MEM_ADDR_SPACE (y)))
3422 y = validize_mem (y);
3424 gcc_assert (mode != BLKmode);
3426 last_insn = emit_move_insn_1 (x, y);
3428 if (y_cst && REG_P (x)
3429 && (set = single_set (last_insn)) != NULL_RTX
3430 && SET_DEST (set) == x
3431 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3432 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3437 /* If Y is representable exactly in a narrower mode, and the target can
3438 perform the extension directly from constant or memory, then emit the
3439 move as an extension. */
3442 compress_float_constant (rtx x, rtx y)
3444 enum machine_mode dstmode = GET_MODE (x);
3445 enum machine_mode orig_srcmode = GET_MODE (y);
3446 enum machine_mode srcmode;
3448 int oldcost, newcost;
3449 bool speed = optimize_insn_for_speed_p ();
3451 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3453 if (LEGITIMATE_CONSTANT_P (y))
3454 oldcost = rtx_cost (y, SET, speed);
3456 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3458 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3459 srcmode != orig_srcmode;
3460 srcmode = GET_MODE_WIDER_MODE (srcmode))
3463 rtx trunc_y, last_insn;
3465 /* Skip if the target can't extend this way. */
3466 ic = can_extend_p (dstmode, srcmode, 0);
3467 if (ic == CODE_FOR_nothing)
3470 /* Skip if the narrowed value isn't exact. */
3471 if (! exact_real_truncate (srcmode, &r))
3474 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3476 if (LEGITIMATE_CONSTANT_P (trunc_y))
3478 /* Skip if the target needs extra instructions to perform the extension. */
3480 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3482 /* This is valid, but may not be cheaper than the original. */
3483 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3484 if (oldcost < newcost)
3487 else if (float_extend_from_mem[dstmode][srcmode])
3489 trunc_y = force_const_mem (srcmode, trunc_y);
3490 /* This is valid, but may not be cheaper than the original. */
3491 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3492 if (oldcost < newcost)
3494 trunc_y = validize_mem (trunc_y);
3499 /* For CSE's benefit, force the compressed constant pool entry
3500 into a new pseudo. This constant may be used in different modes,
3501 and if not, combine will put things back together for us. */
3502 trunc_y = force_reg (srcmode, trunc_y);
3503 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3504 last_insn = get_last_insn ();
3507 set_unique_reg_note (last_insn, REG_EQUAL, y);
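/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: the exactness test that gates the optimization above, at the
   C level.  A double constant may be materialized as a float plus an
   extension only when truncating it to float loses nothing.  All sk_*
   names are invented.  */
#if 0
#include <assert.h>

static int
sk_exactly_representable_as_float (double d)
{
  /* Round-trip through float; exact iff nothing was rounded away.  */
  return (double) (float) d == d;
}

int
main (void)
{
  assert (sk_exactly_representable_as_float (1.5));	/* exact */
  assert (!sk_exactly_representable_as_float (0.1));	/* not exact */
  return 0;
}
#endif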
3515 /* Pushing data onto the stack. */
3517 /* Push a block of length SIZE (perhaps variable)
3518 and return an rtx to address the beginning of the block.
3519 The value may be virtual_outgoing_args_rtx.
3521 EXTRA is the number of bytes of padding to push in addition to SIZE.
3522 BELOW nonzero means this padding comes at low addresses;
3523 otherwise, the padding comes at high addresses. */
3526 push_block (rtx size, int extra, int below)
3530 size = convert_modes (Pmode, ptr_mode, size, 1);
3531 if (CONSTANT_P (size))
3532 anti_adjust_stack (plus_constant (size, extra));
3533 else if (REG_P (size) && extra == 0)
3534 anti_adjust_stack (size);
3537 temp = copy_to_mode_reg (Pmode, size);
3539 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3540 temp, 0, OPTAB_LIB_WIDEN);
3541 anti_adjust_stack (temp);
3544 #ifndef STACK_GROWS_DOWNWARD
3550 temp = virtual_outgoing_args_rtx;
3551 if (extra != 0 && below)
3552 temp = plus_constant (temp, extra);
3556 if (CONST_INT_P (size))
3557 temp = plus_constant (virtual_outgoing_args_rtx,
3558 -INTVAL (size) - (below ? 0 : extra));
3559 else if (extra != 0 && !below)
3560 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3561 negate_rtx (Pmode, plus_constant (size, extra)));
3563 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3564 negate_rtx (Pmode, size));
3567 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3570 #ifdef PUSH_ROUNDING
3572 /* Emit single push insn. */
3575 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3578 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3580 enum insn_code icode;
3581 insn_operand_predicate_fn pred;
3583 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3584 /* If there is a push pattern, use it. Otherwise try the old way of
3585 throwing a MEM representing the push operation to the move expander. */
3586 icode = optab_handler (push_optab, mode);
3587 if (icode != CODE_FOR_nothing)
3589 if (((pred = insn_data[(int) icode].operand[0].predicate)
3590 && !((*pred) (x, mode))))
3591 x = force_reg (mode, x);
3592 emit_insn (GEN_FCN (icode) (x));
3595 if (GET_MODE_SIZE (mode) == rounded_size)
3596 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3597 /* If we are to pad downward, adjust the stack pointer first and
3598 then store X into the stack location using an offset. This is
3599 because emit_move_insn does not know how to pad; it does not have access to type. */
3601 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3603 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3604 HOST_WIDE_INT offset;
3606 emit_move_insn (stack_pointer_rtx,
3607 expand_binop (Pmode,
3608 #ifdef STACK_GROWS_DOWNWARD
3614 GEN_INT (rounded_size),
3615 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3617 offset = (HOST_WIDE_INT) padding_size;
3618 #ifdef STACK_GROWS_DOWNWARD
3619 if (STACK_PUSH_CODE == POST_DEC)
3620 /* We have already decremented the stack pointer, so get the previous value. */
3622 offset += (HOST_WIDE_INT) rounded_size;
3624 if (STACK_PUSH_CODE == POST_INC)
3625 /* We have already incremented the stack pointer, so get the previous value. */
3627 offset -= (HOST_WIDE_INT) rounded_size;
3629 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3633 #ifdef STACK_GROWS_DOWNWARD
3634 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3635 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3636 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3638 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3639 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3640 GEN_INT (rounded_size));
3642 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3645 dest = gen_rtx_MEM (mode, dest_addr);
3649 set_mem_attributes (dest, type, 1);
3651 if (flag_optimize_sibling_calls)
3652 /* Function incoming arguments may overlap with sibling call
3653 outgoing arguments and we cannot allow reordering of reads
3654 from function arguments with stores to outgoing arguments
3655 of sibling calls. */
3656 set_mem_alias_set (dest, 0);
3658 emit_move_insn (dest, x);
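/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: the downward-padding offset arithmetic above, for a
   downward-growing stack with a hypothetical 4-byte push granularity.
   A 1-byte value padded downward lands 3 bytes above the new stack
   pointer.  All sk_* names are invented.  */
#if 0
#include <assert.h>

#define SK_PUSH_ROUNDING(n) (((n) + 3) & ~3)	/* round up to 4 bytes */

static int
sk_push_offset (int mode_size)
{
  int rounded_size = SK_PUSH_ROUNDING (mode_size);
  /* The stack pointer has already dropped by ROUNDED_SIZE; the value
     goes at the top of the slot, past the padding.  */
  return rounded_size - mode_size;
}

int
main (void)
{
  assert (sk_push_offset (1) == 3);	/* QImode: 3 bytes of padding */
  assert (sk_push_offset (4) == 0);	/* SImode: exact fit */
  return 0;
}
#endif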
3662 /* Generate code to push X onto the stack, assuming it has mode MODE and
3664 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3666 SIZE is an rtx for the size of data to be copied (in bytes),
3667 needed only if X is BLKmode.
3669 ALIGN (in bits) is maximum alignment we can assume.
3671 If PARTIAL and REG are both nonzero, then copy that many of the first
3672 bytes of X into registers starting with REG, and push the rest of X.
3673 The amount of space pushed is decreased by PARTIAL bytes.
3674 REG must be a hard register in this case.
3675 If REG is zero but PARTIAL is not, take all other actions for an
3676 argument partially in registers, but do not actually load any registers.
3679 EXTRA is the amount in bytes of extra space to leave next to this arg.
3680 This is ignored if an argument block has already been allocated.
3682 On a machine that lacks real push insns, ARGS_ADDR is the address of
3683 the bottom of the argument block for this call. We use indexing off there
3684 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3685 argument block has not been preallocated.
3687 ARGS_SO_FAR is the size of args previously pushed for this call.
3689 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3690 for arguments passed in registers. If nonzero, it will be the number
3691 of bytes required. */
3694 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3695 unsigned int align, int partial, rtx reg, int extra,
3696 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3700 enum direction stack_direction
3701 #ifdef STACK_GROWS_DOWNWARD
3707 /* Decide where to pad the argument: `downward' for below,
3708 `upward' for above, or `none' for don't pad it.
3709 Default is below for small data on big-endian machines; else above. */
3710 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3712 /* Invert direction if stack is post-decrement. FIXME: why? */
3714 if (STACK_PUSH_CODE == POST_DEC)
3715 if (where_pad != none)
3716 where_pad = (where_pad == downward ? upward : downward);
3721 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3723 /* Copy a block into the stack, entirely or partially. */
3730 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3731 used = partial - offset;
3733 if (mode != BLKmode)
3735 /* A value is to be stored in an insufficiently aligned
3736 stack slot; copy via a suitably aligned slot if necessary. */
3738 size = GEN_INT (GET_MODE_SIZE (mode));
3739 if (!MEM_P (xinner))
3741 temp = assign_temp (type, 0, 1, 1);
3742 emit_move_insn (temp, xinner);
3749 /* USED is now the # of bytes we need not copy to the stack
3750 because registers will take care of them. */
3753 xinner = adjust_address (xinner, BLKmode, used);
3755 /* If the partial register-part of the arg counts in its stack size,
3756 skip the part of stack space corresponding to the registers.
3757 Otherwise, start copying to the beginning of the stack space,
3758 by setting SKIP to 0. */
3759 skip = (reg_parm_stack_space == 0) ? 0 : used;
3761 #ifdef PUSH_ROUNDING
3762 /* Do it with several push insns if that doesn't take lots of insns
3763 and if there is no difficulty with push insns that skip bytes
3764 on the stack for alignment purposes. */
3767 && CONST_INT_P (size)
3769 && MEM_ALIGN (xinner) >= align
3770 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3771 /* Here we avoid the case of a structure whose weak alignment
3772 forces many pushes of a small amount of data,
3773 and such small pushes do rounding that causes trouble. */
3774 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3775 || align >= BIGGEST_ALIGNMENT
3776 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3777 == (align / BITS_PER_UNIT)))
3778 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3780 /* Push padding now if padding above and stack grows down,
3781 or if padding below and stack grows up.
3782 But if space already allocated, this has already been done. */
3783 if (extra && args_addr == 0
3784 && where_pad != none && where_pad != stack_direction)
3785 anti_adjust_stack (GEN_INT (extra));
3787 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3790 #endif /* PUSH_ROUNDING */
3794 /* Otherwise make space on the stack and copy the data
3795 to the address of that space. */
3797 /* Deduct words put into registers from the size we must copy. */
3800 if (CONST_INT_P (size))
3801 size = GEN_INT (INTVAL (size) - used);
3803 size = expand_binop (GET_MODE (size), sub_optab, size,
3804 GEN_INT (used), NULL_RTX, 0,
3808 /* Get the address of the stack space.
3809 In this case, we do not deal with EXTRA separately.
3810 A single stack adjust will do. */
3813 temp = push_block (size, extra, where_pad == downward);
3816 else if (CONST_INT_P (args_so_far))
3817 temp = memory_address (BLKmode,
3818 plus_constant (args_addr,
3819 skip + INTVAL (args_so_far)));
3821 temp = memory_address (BLKmode,
3822 plus_constant (gen_rtx_PLUS (Pmode,
3827 if (!ACCUMULATE_OUTGOING_ARGS)
3829 /* If the source is referenced relative to the stack pointer,
3830 copy it to another register to stabilize it. We do not need
3831 to do this if we know that we won't be changing sp. */
3833 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3834 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3835 temp = copy_to_reg (temp);
3838 target = gen_rtx_MEM (BLKmode, temp);
3840 /* We do *not* set_mem_attributes here, because incoming arguments
3841 may overlap with sibling call outgoing arguments and we cannot
3842 allow reordering of reads from function arguments with stores
3843 to outgoing arguments of sibling calls. We do, however, want
3844 to record the alignment of the stack slot. */
3845 /* ALIGN may well be better aligned than TYPE, e.g. due to
3846 PARM_BOUNDARY. Assume the caller isn't lying. */
3847 set_mem_align (target, align);
3849 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3852 else if (partial > 0)
3854 /* Scalar partly in registers. */
3856 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3859 /* # bytes of start of argument
3860 that we must make space for but need not store. */
3861 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3862 int args_offset = INTVAL (args_so_far);
3865 /* Push padding now if padding above and stack grows down,
3866 or if padding below and stack grows up.
3867 But if space already allocated, this has already been done. */
3868 if (extra && args_addr == 0
3869 && where_pad != none && where_pad != stack_direction)
3870 anti_adjust_stack (GEN_INT (extra));
3872 /* If we make space by pushing it, we might as well push
3873 the real data. Otherwise, we can leave OFFSET nonzero
3874 and leave the space uninitialized. */
3878 /* Now NOT_STACK gets the number of words that we don't need to
3879 allocate on the stack. Convert OFFSET to words too. */
3880 not_stack = (partial - offset) / UNITS_PER_WORD;
3881 offset /= UNITS_PER_WORD;
3883 /* If the partial register-part of the arg counts in its stack size,
3884 skip the part of stack space corresponding to the registers.
3885 Otherwise, start copying to the beginning of the stack space,
3886 by setting SKIP to 0. */
3887 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3889 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3890 x = validize_mem (force_const_mem (mode, x));
3892 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3893 SUBREGs of such registers are not allowed. */
3894 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3895 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3896 x = copy_to_reg (x);
3898 /* Loop over all the words allocated on the stack for this arg. */
3899 /* We can do it by words, because any scalar bigger than a word
3900 has a size a multiple of a word. */
3901 #ifndef PUSH_ARGS_REVERSED
3902 for (i = not_stack; i < size; i++)
3904 for (i = size - 1; i >= not_stack; i--)
3906 if (i >= not_stack + offset)
3907 emit_push_insn (operand_subword_force (x, i, mode),
3908 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3910 GEN_INT (args_offset + ((i - not_stack + skip)
3912 reg_parm_stack_space, alignment_pad);
3919 /* Push padding now if padding above and stack grows down,
3920 or if padding below and stack grows up.
3921 But if space already allocated, this has already been done. */
3922 if (extra && args_addr == 0
3923 && where_pad != none && where_pad != stack_direction)
3924 anti_adjust_stack (GEN_INT (extra));
3926 #ifdef PUSH_ROUNDING
3927 if (args_addr == 0 && PUSH_ARGS)
3928 emit_single_push_insn (mode, x, type);
3932 if (CONST_INT_P (args_so_far))
3934 = memory_address (mode,
3935 plus_constant (args_addr,
3936 INTVAL (args_so_far)));
3938 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3940 dest = gen_rtx_MEM (mode, addr);
3942 /* We do *not* set_mem_attributes here, because incoming arguments
3943 may overlap with sibling call outgoing arguments and we cannot
3944 allow reordering of reads from function arguments with stores
3945 to outgoing arguments of sibling calls. We do, however, want
3946 to record the alignment of the stack slot. */
3947 /* ALIGN may well be better aligned than TYPE, e.g. due to
3948 PARM_BOUNDARY. Assume the caller isn't lying. */
3949 set_mem_align (dest, align);
3951 emit_move_insn (dest, x);
3955 /* If part should go in registers, copy that part
3956 into the appropriate registers. Do this now, at the end,
3957 since mem-to-mem copies above may do function calls. */
3958 if (partial > 0 && reg != 0)
3960 /* Handle calls that pass values in multiple non-contiguous locations.
3961 The Irix 6 ABI has examples of this. */
3962 if (GET_CODE (reg) == PARALLEL)
3963 emit_group_load (reg, x, type, -1);
3966 gcc_assert (partial % UNITS_PER_WORD == 0);
3967 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3971 if (extra && args_addr == 0 && where_pad == stack_direction)
3972 anti_adjust_stack (GEN_INT (extra));
3974 if (alignment_pad && args_addr == 0)
3975 anti_adjust_stack (alignment_pad);
3978 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3982 get_subtarget (rtx x)
3986 /* Only registers can be subtargets. */
3988 /* Don't use hard regs to avoid extending their life. */
3989 || REGNO (x) < FIRST_PSEUDO_REGISTER
3993 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3994 FIELD is a bitfield. Returns true if the optimization was successful,
3995 and there's nothing else to do. */
3998 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3999 unsigned HOST_WIDE_INT bitpos,
4000 enum machine_mode mode1, rtx str_rtx,
4003 enum machine_mode str_mode = GET_MODE (str_rtx);
4004 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4009 if (mode1 != VOIDmode
4010 || bitsize >= BITS_PER_WORD
4011 || str_bitsize > BITS_PER_WORD
4012 || TREE_SIDE_EFFECTS (to)
4013 || TREE_THIS_VOLATILE (to))
4017 if (!BINARY_CLASS_P (src)
4018 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4021 op0 = TREE_OPERAND (src, 0);
4022 op1 = TREE_OPERAND (src, 1);
4025 if (!operand_equal_p (to, op0, 0))
4028 if (MEM_P (str_rtx))
4030 unsigned HOST_WIDE_INT offset1;
4032 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4033 str_mode = word_mode;
4034 str_mode = get_best_mode (bitsize, bitpos,
4035 MEM_ALIGN (str_rtx), str_mode, 0);
4036 if (str_mode == VOIDmode)
4038 str_bitsize = GET_MODE_BITSIZE (str_mode);
4041 bitpos %= str_bitsize;
4042 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4043 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4045 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4048 /* If the bit field covers the whole REG/MEM, store_field
4049 will likely generate better code. */
4050 if (bitsize >= str_bitsize)
4053 /* We can't handle fields split across multiple entities. */
4054 if (bitpos + bitsize > str_bitsize)
4057 if (BYTES_BIG_ENDIAN)
4058 bitpos = str_bitsize - bitpos - bitsize;
4060 switch (TREE_CODE (src))
4064 /* For now, just optimize the case of the topmost bitfield
4065 where we don't need to do any masking and also
4066 1 bit bitfields where xor can be used.
4067 We might win by one instruction for the other bitfields
4068 too if insv/extv instructions aren't used, so that
4069 can be added later. */
4070 if (bitpos + bitsize != str_bitsize
4071 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4074 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4075 value = convert_modes (str_mode,
4076 TYPE_MODE (TREE_TYPE (op1)), value,
4077 TYPE_UNSIGNED (TREE_TYPE (op1)));
4079 /* We may be accessing data outside the field, which means
4080 we can alias adjacent data. */
4081 if (MEM_P (str_rtx))
4083 str_rtx = shallow_copy_rtx (str_rtx);
4084 set_mem_alias_set (str_rtx, 0);
4085 set_mem_expr (str_rtx, 0);
4088 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4089 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4091 value = expand_and (str_mode, value, const1_rtx, NULL);
4094 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4095 build_int_cst (NULL_TREE, bitpos),
4097 result = expand_binop (str_mode, binop, str_rtx,
4098 value, str_rtx, 1, OPTAB_WIDEN);
4099 if (result != str_rtx)
4100 emit_move_insn (str_rtx, result);
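/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: it shows why the topmost bitfield (the "bitpos + bitsize ==
   str_bitsize" case above) needs no masking for +=: carries out of the
   field simply fall off the top of the word.  Layout assumed: an 8-bit
   field at bits 24..31 of a 32-bit word.  All sk_* names are invented.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t
sk_add_to_top_field (uint32_t word, uint32_t val)
{
  /* Shift the addend up to the field and add; overflow out of the
     field overflows out of the word, so the low 24 bits are safe.  */
  return word + (val << 24);
}

int
main (void)
{
  uint32_t w = 0xFF000001;		/* field = 0xFF, low bits = 1 */
  w = sk_add_to_top_field (w, 2);	/* field wraps to 0x01 */
  assert (w == 0x01000001);
  return 0;
}
#endif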
4105 if (TREE_CODE (op1) != INTEGER_CST)
4107 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4108 value = convert_modes (GET_MODE (str_rtx),
4109 TYPE_MODE (TREE_TYPE (op1)), value,
4110 TYPE_UNSIGNED (TREE_TYPE (op1)));
4112 /* We may be accessing data outside the field, which means
4113 we can alias adjacent data. */
4114 if (MEM_P (str_rtx))
4116 str_rtx = shallow_copy_rtx (str_rtx);
4117 set_mem_alias_set (str_rtx, 0);
4118 set_mem_expr (str_rtx, 0);
4121 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4122 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4124 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4126 value = expand_and (GET_MODE (str_rtx), value, mask,
4129 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4130 build_int_cst (NULL_TREE, bitpos),
4132 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4133 value, str_rtx, 1, OPTAB_WIDEN);
4134 if (result != str_rtx)
4135 emit_move_insn (str_rtx, result);
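/* The block below is an illustrative, standalone sketch and is NOT part
   of GCC: the mask-shift-or sequence above for |= into an interior
   field, here a hypothetical 5-bit field at bit 8 of a 32-bit word.
   The mask ((1 << bitsize) - 1) discards value bits outside the field
   before the shift into place.  All sk_* names are invented.  */
#if 0
#include <assert.h>
#include <stdint.h>

static uint32_t
sk_or_into_field (uint32_t word, uint32_t val)
{
  uint32_t mask = (1u << 5) - 1;	/* field-width mask */
  return word | ((val & mask) << 8);	/* shift into place, one OR */
}

int
main (void)
{
  assert (sk_or_into_field (0, 0x3) == 0x300);
  /* Bits of VAL outside the field are discarded by the mask.  */
  assert (sk_or_into_field (0, 0xFF) == (0x1Fu << 8));
  return 0;
}
#endif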
4146 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4147 is true, try generating a nontemporal store. */
4150 expand_assignment (tree to, tree from, bool nontemporal)
4154 enum machine_mode mode;
4157 /* Don't crash if the lhs of the assignment was erroneous. */
4158 if (TREE_CODE (to) == ERROR_MARK)
4160 result = expand_normal (from);
4164 /* Optimize away no-op moves without side-effects. */
4165 if (operand_equal_p (to, from, 0))
4168 mode = TYPE_MODE (TREE_TYPE (to));
4169 if ((TREE_CODE (to) == MEM_REF
4170 || TREE_CODE (to) == TARGET_MEM_REF)
4172 && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
4173 get_object_alignment (to, BIGGEST_ALIGNMENT)))
4174 < (signed) GET_MODE_ALIGNMENT (mode))
4175 && ((icode = optab_handler (movmisalign_optab, mode))
4176 != CODE_FOR_nothing))
4178 enum machine_mode address_mode, op_mode1;
4179 rtx insn, reg, op0, mem;
4181 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4182 reg = force_not_mem (reg);
4184 if (TREE_CODE (to) == MEM_REF)
4187 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
4188 tree base = TREE_OPERAND (to, 0);
4189 address_mode = targetm.addr_space.address_mode (as);
4190 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4191 op0 = convert_memory_address_addr_space (address_mode, op0, as);
4192 if (!integer_zerop (TREE_OPERAND (to, 1)))
4195 = immed_double_int_const (mem_ref_offset (to), address_mode);
4196 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4198 op0 = memory_address_addr_space (mode, op0, as);
4199 mem = gen_rtx_MEM (mode, op0);
4200 set_mem_attributes (mem, to, 0);
4201 set_mem_addr_space (mem, as);
4203 else if (TREE_CODE (to) == TARGET_MEM_REF)
4205 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
4206 struct mem_address addr;
4208 get_address_description (to, &addr);
4209 op0 = addr_for_mem_ref (&addr, as, true);
4210 op0 = memory_address_addr_space (mode, op0, as);
4211 mem = gen_rtx_MEM (mode, op0);
4212 set_mem_attributes (mem, to, 0);
4213 set_mem_addr_space (mem, as);
4217 if (TREE_THIS_VOLATILE (to))
4218 MEM_VOLATILE_P (mem) = 1;
4220 op_mode1 = insn_data[icode].operand[1].mode;
4221 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4222 && op_mode1 != VOIDmode)
4223 reg = copy_to_mode_reg (op_mode1, reg);
4225 insn = GEN_FCN (icode) (mem, reg);
4226 /* The movmisalign<mode> pattern cannot fail, else the assignment would
4227 silently be omitted. */
4228 gcc_assert (insn != NULL_RTX);
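/* A hypothetical source fragment that can take the movmisalign path
   above on a STRICT_ALIGNMENT target:

       struct __attribute__((packed)) P { char c; int i; };
       void f (struct P *p) { p->i = 42; }

   The access to P->I is less aligned than SImode requires, so when the
   target provides a movmisalign<mode> pattern it is used instead of a
   plain move.  */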
4233 /* Assignment of a structure component needs special treatment
4234 if the structure component's rtx is not simply a MEM.
4235 Assignment of an array element at a constant index, and assignment of
4236 an array element in an unaligned packed structure field, has the same
4237 problem.  */
4238 if (handled_component_p (to)
4239 /* ??? We only need to handle MEM_REF here if the access is not
4240 a full access of the base object. */
4241 || (TREE_CODE (to) == MEM_REF
4242 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4243 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4245 enum machine_mode mode1;
4246 HOST_WIDE_INT bitsize, bitpos;
4253 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4254 &unsignedp, &volatilep, true);
4256 /* If we are going to use store_bit_field and extract_bit_field,
4257 make sure to_rtx will be safe for multiple use. */
4259 to_rtx = expand_normal (tem);
4261 /* If the bitfield is volatile, we want to access it in the
4262 field's mode, not the computed mode.  */
4263 if (volatilep
4264 && GET_CODE (to_rtx) == MEM
4265 && flag_strict_volatile_bitfields > 0)
4266 to_rtx = adjust_address (to_rtx, mode1, 0);
4270 enum machine_mode address_mode;
4273 if (!MEM_P (to_rtx))
4275 /* We can get constant negative offsets into arrays with broken
4276 user code. Translate this to a trap instead of ICEing. */
4277 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4278 expand_builtin_trap ();
4279 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4282 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4284 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4285 if (GET_MODE (offset_rtx) != address_mode)
4286 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4288 /* A constant address in TO_RTX can have VOIDmode, we must not try
4289 to call force_reg for that case. Avoid that case. */
4290 if (MEM_P (to_rtx)
4291 && GET_MODE (to_rtx) == BLKmode
4292 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4293 && bitsize > 0
4294 && (bitpos % bitsize) == 0
4295 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4296 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4298 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4302 to_rtx = offset_address (to_rtx, offset_rtx,
4303 highest_pow2_factor_for_target (to,
4304 offset));
4307 /* No action is needed if the target is not a memory and the field
4308 lies completely outside that target. This can occur if the source
4309 code contains an out-of-bounds access to a small array. */
4310 if (!MEM_P (to_rtx)
4311 && GET_MODE (to_rtx) != BLKmode
4312 && (unsigned HOST_WIDE_INT) bitpos
4313 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4315 expand_normal (from);
4318 /* Handle expand_expr of a complex value returning a CONCAT. */
4319 else if (GET_CODE (to_rtx) == CONCAT)
4321 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4323 gcc_assert (bitpos == 0);
4324 result = store_expr (from, to_rtx, false, nontemporal);
4328 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4329 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4337 /* If the field is at offset zero, we could have been given the
4338 DECL_RTX of the parent struct. Don't munge it. */
4339 to_rtx = shallow_copy_rtx (to_rtx);
4341 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4343 /* Deal with volatile and readonly fields. The former is only
4344 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4345 if (volatilep && MEM_P (to_rtx))
4346 MEM_VOLATILE_P (to_rtx) = 1;
4347 if (component_uses_parent_alias_set (to))
4348 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4351 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4352 to_rtx, to, from))
4353 result = NULL;
4354 else
4355 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4356 TREE_TYPE (tem), get_alias_set (to),
4357 nontemporal);
4361 preserve_temp_slots (result);
4367 /* If the rhs is a function call and its value is not an aggregate,
4368 call the function before we start to compute the lhs.
4369 This is needed for correct code for cases such as
4370 val = setjmp (buf) on machines where reference to val
4371 requires loading up part of an address in a separate insn.
4373 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4374 since it might be a promoted variable where the zero- or sign- extension
4375 needs to be done. Handling this in the normal way is safe because no
4376 computation is done before the call. The same is true for SSA names. */
4377 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4378 && COMPLETE_TYPE_P (TREE_TYPE (from))
4379 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4380 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4381 && REG_P (DECL_RTL (to)))
4382 || TREE_CODE (to) == SSA_NAME))
4387 value = expand_normal (from);
4389 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4391 /* Handle calls that return values in multiple non-contiguous locations.
4392 The Irix 6 ABI has examples of this. */
4393 if (GET_CODE (to_rtx) == PARALLEL)
4394 emit_group_load (to_rtx, value, TREE_TYPE (from),
4395 int_size_in_bytes (TREE_TYPE (from)));
4396 else if (GET_MODE (to_rtx) == BLKmode)
4397 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4400 if (POINTER_TYPE_P (TREE_TYPE (to)))
4401 value = convert_memory_address_addr_space
4402 (GET_MODE (to_rtx), value,
4403 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4405 emit_move_insn (to_rtx, value);
4407 preserve_temp_slots (to_rtx);
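/* The classic instance of the case handled above (illustrative):

       jmp_buf buf;
       int val;
       ...
       val = setjmp (buf);

   Expanding the call before the lhs means no partially-computed address
   of VAL is live across the call on machines that need more than one
   insn to build that address.  */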
4413 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4414 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4417 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4419 /* Don't move directly into a return register. */
4420 if (TREE_CODE (to) == RESULT_DECL
4421 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4426 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4428 if (GET_CODE (to_rtx) == PARALLEL)
4429 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4430 int_size_in_bytes (TREE_TYPE (from)));
4432 emit_move_insn (to_rtx, temp);
4434 preserve_temp_slots (to_rtx);
4440 /* In case we are returning the contents of an object which overlaps
4441 the place the value is being stored, use a safe function when copying
4442 a value through a pointer into a structure value return block. */
4443 if (TREE_CODE (to) == RESULT_DECL
4444 && TREE_CODE (from) == INDIRECT_REF
4445 && ADDR_SPACE_GENERIC_P
4446 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4447 && refs_may_alias_p (to, from)
4448 && cfun->returns_struct
4449 && !cfun->returns_pcc_struct)
4454 size = expr_size (from);
4455 from_rtx = expand_normal (from);
4457 emit_library_call (memmove_libfunc, LCT_NORMAL,
4458 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4459 XEXP (from_rtx, 0), Pmode,
4460 convert_to_mode (TYPE_MODE (sizetype),
4461 size, TYPE_UNSIGNED (sizetype)),
4462 TYPE_MODE (sizetype));
4464 preserve_temp_slots (to_rtx);
4470 /* Compute FROM and store the value in the rtx we got. */
4473 result = store_expr (from, to_rtx, 0, nontemporal);
4474 preserve_temp_slots (result);
4480 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4481 succeeded, false otherwise. */
4484 emit_storent_insn (rtx to, rtx from)
4486 enum machine_mode mode = GET_MODE (to), imode;
4487 enum insn_code code = optab_handler (storent_optab, mode);
4490 if (code == CODE_FOR_nothing)
4491 return false;
4493 imode = insn_data[code].operand[0].mode;
4494 if (!insn_data[code].operand[0].predicate (to, imode))
4495 return false;
4497 imode = insn_data[code].operand[1].mode;
4498 if (!insn_data[code].operand[1].predicate (from, imode))
4500 from = copy_to_mode_reg (imode, from);
4501 if (!insn_data[code].operand[1].predicate (from, imode))
4502 return false;
4505 pattern = GEN_FCN (code) (to, from);
4506 if (pattern == NULL_RTX)
4507 return false;
4509 emit_insn (pattern);
4510 return true;
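/* For example (not mandated by this code), on x86 targets with SSE2 the
   storent pattern can expand to a non-temporal move such as MOVNTI,
   which bypasses the cache hierarchy; on targets without such an insn
   the optab is unhandled and callers fall back to an ordinary store.  */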
4513 /* Generate code for computing expression EXP,
4514 and storing the value into TARGET.
4516 If the mode is BLKmode then we may return TARGET itself.
4517 It turns out that in BLKmode it doesn't cause a problem,
4518 because C has no operators that could combine two different
4519 assignments into the same BLKmode object with different values
4520 with no sequence point.  Will other languages need this to
4521 be more thorough?
4523 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4524 stack, and block moves may need to be treated specially.
4526 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4529 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4532 rtx alt_rtl = NULL_RTX;
4533 location_t loc = EXPR_LOCATION (exp);
4535 if (VOID_TYPE_P (TREE_TYPE (exp)))
4537 /* C++ can generate ?: expressions with a throw expression in one
4538 branch and an rvalue in the other. Here, we resolve attempts to
4539 store the throw expression's nonexistent result. */
4540 gcc_assert (!call_param_p);
4541 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4544 if (TREE_CODE (exp) == COMPOUND_EXPR)
4546 /* Perform first part of compound expression, then assign from second
4547 part.  */
4548 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4549 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4550 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4551 nontemporal);
4553 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4555 /* For conditional expression, get safe form of the target. Then
4556 test the condition, doing the appropriate assignment on either
4557 side. This avoids the creation of unnecessary temporaries.
4558 For non-BLKmode, it is more efficient not to do this. */
4560 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4562 do_pending_stack_adjust ();
4564 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4565 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4566 nontemporal);
4567 emit_jump_insn (gen_jump (lab2));
4570 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4571 nontemporal);
4577 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4578 /* If this is a scalar in a register that is stored in a wider mode
4579 than the declared mode, compute the result into its declared mode
4580 and then convert to the wider mode.  Our value is the computed
4581 expression.  */
4583 rtx inner_target = 0;
4585 /* We can do the conversion inside EXP, which will often result
4586 in some optimizations. Do the conversion in two steps: first
4587 change the signedness, if needed, then the extend. But don't
4588 do this if the type of EXP is a subtype of something else
4589 since then the conversion might involve more than just
4590 converting modes. */
4591 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4592 && TREE_TYPE (TREE_TYPE (exp)) == 0
4593 && GET_MODE_PRECISION (GET_MODE (target))
4594 == TYPE_PRECISION (TREE_TYPE (exp)))
4596 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4597 != SUBREG_PROMOTED_UNSIGNED_P (target))
4599 /* Some types, e.g. Fortran's logical*4, won't have a signed
4600 version, so use the mode instead. */
4601 tree ntype
4602 = (signed_or_unsigned_type_for
4603 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4604 if (ntype == NULL)
4605 ntype = lang_hooks.types.type_for_mode
4606 (TYPE_MODE (TREE_TYPE (exp)),
4607 SUBREG_PROMOTED_UNSIGNED_P (target));
4609 exp = fold_convert_loc (loc, ntype, exp);
4612 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4613 (GET_MODE (SUBREG_REG (target)),
4614 SUBREG_PROMOTED_UNSIGNED_P (target)),
4615 exp);
4617 inner_target = SUBREG_REG (target);
4620 temp = expand_expr (exp, inner_target, VOIDmode,
4621 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4623 /* If TEMP is a VOIDmode constant, use convert_modes to make
4624 sure that we properly convert it. */
4625 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4627 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4628 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4629 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4630 GET_MODE (target), temp,
4631 SUBREG_PROMOTED_UNSIGNED_P (target));
4634 convert_move (SUBREG_REG (target), temp,
4635 SUBREG_PROMOTED_UNSIGNED_P (target));
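/* E.g. (illustrative) on a target that promotes SImode variables into
   DImode hard registers, an assignment

       int x;
       ...
       x = a + b;

   computes A + B in SImode and then sign- or zero-extends the result
   into the wider register, as selected by SUBREG_PROMOTED_UNSIGNED_P.  */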
4639 else if (TREE_CODE (exp) == STRING_CST
4640 && !nontemporal && !call_param_p
4641 && TREE_STRING_LENGTH (exp) > 0
4642 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4644 /* Optimize initialization of an array with a STRING_CST. */
4645 HOST_WIDE_INT exp_len, str_copy_len;
4646 rtx dest_mem;
4648 exp_len = int_expr_size (exp);
4649 if (exp_len <= 0)
4650 goto normal_expr;
4652 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4653 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4654 goto normal_expr;
4656 str_copy_len = TREE_STRING_LENGTH (exp);
4657 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4659 str_copy_len += STORE_MAX_PIECES - 1;
4660 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4662 str_copy_len = MIN (str_copy_len, exp_len);
4663 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4664 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4665 MEM_ALIGN (target), false))
4666 goto normal_expr;
4668 dest_mem = target;
4670 dest_mem = store_by_pieces (dest_mem,
4671 str_copy_len, builtin_strncpy_read_str,
4672 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4673 MEM_ALIGN (target), false,
4674 exp_len > str_copy_len ? 1 : 0);
4675 if (exp_len > str_copy_len)
4676 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4677 GEN_INT (exp_len - str_copy_len),
4678 BLOCK_OP_NORMAL);
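/* E.g. (illustrative) for

       char buf[32] = "abc";

   the string bytes (rounded up to a store_by_pieces-friendly length,
   never past EXP_LEN) are written by pieces and the remaining tail of
   BUF is cleared by the single clear_storage call above.  */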
4681 else if (TREE_CODE (exp) == MEM_REF
4682 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4683 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == STRING_CST
4684 && integer_zerop (TREE_OPERAND (exp, 1))
4685 && !nontemporal && !call_param_p
4686 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4688 /* Optimize initialization of an array with a STRING_CST. */
4689 HOST_WIDE_INT exp_len, str_copy_len;
4690 rtx dest_mem;
4691 tree str = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4693 exp_len = int_expr_size (exp);
4694 if (exp_len <= 0)
4695 goto normal_expr;
4697 str_copy_len = strlen (TREE_STRING_POINTER (str));
4698 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4699 goto normal_expr;
4701 str_copy_len = TREE_STRING_LENGTH (str);
4702 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4704 str_copy_len += STORE_MAX_PIECES - 1;
4705 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4707 str_copy_len = MIN (str_copy_len, exp_len);
4708 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4709 CONST_CAST(char *, TREE_STRING_POINTER (str)),
4710 MEM_ALIGN (target), false))
4711 goto normal_expr;
4713 dest_mem = target;
4715 dest_mem = store_by_pieces (dest_mem,
4716 str_copy_len, builtin_strncpy_read_str,
4717 CONST_CAST(char *, TREE_STRING_POINTER (str)),
4718 MEM_ALIGN (target), false,
4719 exp_len > str_copy_len ? 1 : 0);
4720 if (exp_len > str_copy_len)
4721 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4722 GEN_INT (exp_len - str_copy_len),
4723 BLOCK_OP_NORMAL);
4731 /* If we want to use a nontemporal store, force the value to
4732 register first.  */
4733 tmp_target = nontemporal ? NULL_RTX : target;
4734 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4735 (call_param_p
4736 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4737 &alt_rtl);
4740 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4741 the same as that of TARGET, adjust the constant. This is needed, for
4742 example, in case it is a CONST_DOUBLE and we want only a word-sized
4743 value.  */
4744 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4745 && TREE_CODE (exp) != ERROR_MARK
4746 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4747 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4748 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4750 /* If value was not generated in the target, store it there.
4751 Convert the value to TARGET's type first if necessary and emit the
4752 pending incrementations that have been queued when expanding EXP.
4753 Note that we cannot emit the whole queue blindly because this will
4754 effectively disable the POST_INC optimization later.
4756 If TEMP and TARGET compare equal according to rtx_equal_p, but
4757 one or both of them are volatile memory refs, we have to distinguish
4758 two cases:
4759 - expand_expr has used TARGET.  In this case, we must not generate
4760 another copy.  This can be detected by TARGET being equal according
4761 to == .
4762 - expand_expr has not used TARGET - that means that the source just
4763 happens to have the same RTX form. Since temp will have been created
4764 by expand_expr, it will compare unequal according to == .
4765 We must generate a copy in this case, to reach the correct number
4766 of volatile memory references. */
4768 if ((! rtx_equal_p (temp, target)
4769 || (temp != target && (side_effects_p (temp)
4770 || side_effects_p (target))))
4771 && TREE_CODE (exp) != ERROR_MARK
4772 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4773 but TARGET is not a valid memory reference, TEMP will differ
4774 from TARGET although it is really the same location. */
4775 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4776 /* If there's nothing to copy, don't bother. Don't call
4777 expr_size unless necessary, because some front-ends' (C++)
4778 expr_size hook must not be given objects that are not
4779 supposed to be bit-copied or bit-initialized. */
4780 && expr_size (exp) != const0_rtx)
4782 if (GET_MODE (temp) != GET_MODE (target)
4783 && GET_MODE (temp) != VOIDmode)
4785 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4786 if (GET_MODE (target) == BLKmode
4787 && GET_MODE (temp) == BLKmode)
4788 emit_block_move (target, temp, expr_size (exp),
4789 (call_param_p
4790 ? BLOCK_OP_CALL_PARM
4791 : BLOCK_OP_NORMAL));
4792 else if (GET_MODE (target) == BLKmode)
4793 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
4794 0, GET_MODE (temp), temp);
4796 convert_move (target, temp, unsignedp);
4799 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4801 /* Handle copying a string constant into an array. The string
4802 constant may be shorter than the array. So copy just the string's
4803 actual length, and clear the rest. First get the size of the data
4804 type of the string, which is actually the size of the target. */
4805 rtx size = expr_size (exp);
4807 if (CONST_INT_P (size)
4808 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4809 emit_block_move (target, temp, size,
4810 (call_param_p
4811 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4814 enum machine_mode pointer_mode
4815 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4816 enum machine_mode address_mode
4817 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4819 /* Compute the size of the data to copy from the string.  */
4820 tree copy_size
4821 = size_binop_loc (loc, MIN_EXPR,
4822 make_tree (sizetype, size),
4823 size_int (TREE_STRING_LENGTH (exp)));
4824 rtx copy_size_rtx
4825 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4826 (call_param_p
4827 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4830 /* Copy that much. */
4831 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4832 TYPE_UNSIGNED (sizetype));
4833 emit_block_move (target, temp, copy_size_rtx,
4834 (call_param_p
4835 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4837 /* Figure out how much is left in TARGET that we have to clear.
4838 Do all calculations in pointer_mode. */
4839 if (CONST_INT_P (copy_size_rtx))
4841 size = plus_constant (size, -INTVAL (copy_size_rtx));
4842 target = adjust_address (target, BLKmode,
4843 INTVAL (copy_size_rtx));
4847 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4848 copy_size_rtx, NULL_RTX, 0,
4849 OPTAB_LIB_WIDEN);
4851 if (GET_MODE (copy_size_rtx) != address_mode)
4852 copy_size_rtx = convert_to_mode (address_mode,
4853 copy_size_rtx,
4854 TYPE_UNSIGNED (sizetype));
4856 target = offset_address (target, copy_size_rtx,
4857 highest_pow2_factor (copy_size));
4858 label = gen_label_rtx ();
4859 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4860 GET_MODE (size), 0, label);
4863 if (size != const0_rtx)
4864 clear_storage (target, size, BLOCK_OP_NORMAL);
4870 /* Handle calls that return values in multiple non-contiguous locations.
4871 The Irix 6 ABI has examples of this. */
4872 else if (GET_CODE (target) == PARALLEL)
4873 emit_group_load (target, temp, TREE_TYPE (exp),
4874 int_size_in_bytes (TREE_TYPE (exp)));
4875 else if (GET_MODE (temp) == BLKmode)
4876 emit_block_move (target, temp, expr_size (exp),
4877 (call_param_p
4878 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4879 else if (nontemporal
4880 && emit_storent_insn (target, temp))
4881 /* If we managed to emit a nontemporal store, there is nothing else to
4882 do.  */
4886 temp = force_operand (temp, target);
4888 emit_move_insn (target, temp);
4895 /* Helper for categorize_ctor_elements. Identical interface. */
4898 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4899 HOST_WIDE_INT *p_elt_count,
4902 unsigned HOST_WIDE_INT idx;
4903 HOST_WIDE_INT nz_elts, elt_count;
4904 tree value, purpose;
4906 /* Whether CTOR is a valid constant initializer, in accordance with what
4907 initializer_constant_valid_p does. If inferred from the constructor
4908 elements, true until proven otherwise. */
4909 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4910 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4915 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4917 HOST_WIDE_INT mult = 1;
4919 if (TREE_CODE (purpose) == RANGE_EXPR)
4921 tree lo_index = TREE_OPERAND (purpose, 0);
4922 tree hi_index = TREE_OPERAND (purpose, 1);
4924 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4925 mult = (tree_low_cst (hi_index, 1)
4926 - tree_low_cst (lo_index, 1) + 1);
4929 switch (TREE_CODE (value))
4933 HOST_WIDE_INT nz = 0, ic = 0;
4935 bool const_elt_p
4936 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4938 nz_elts += mult * nz;
4939 elt_count += mult * ic;
4941 if (const_from_elts_p && const_p)
4942 const_p = const_elt_p;
4949 if (!initializer_zerop (value))
4955 nz_elts += mult * TREE_STRING_LENGTH (value);
4956 elt_count += mult * TREE_STRING_LENGTH (value);
4960 if (!initializer_zerop (TREE_REALPART (value)))
4962 if (!initializer_zerop (TREE_IMAGPART (value)))
4970 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4972 if (!initializer_zerop (TREE_VALUE (v)))
4981 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4984 nz_elts += mult * tc;
4985 elt_count += mult * tc;
4987 if (const_from_elts_p && const_p)
4988 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4996 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4997 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
5000 bool clear_this = true;
5002 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
5004 /* We don't expect more than one element of the union to be
5005 initialized. Not sure what we should do otherwise... */
5006 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
5009 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
5010 CONSTRUCTOR_ELTS (ctor),
5013 /* ??? We could look at each element of the union, and find the
5014 largest element. Which would avoid comparing the size of the
5015 initialized element against any tail padding in the union.
5016 Doesn't seem worth the effort... */
5017 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
5018 TYPE_SIZE (init_sub_type)) == 1)
5020 /* And now we have to find out if the element itself is fully
5021 constructed. E.g. for union { struct { int a, b; } s; } u
5022 = { .s = { .a = 1 } }. */
5023 if (elt_count == count_type_elements (init_sub_type, false))
5028 *p_must_clear = clear_this;
5031 *p_nz_elts += nz_elts;
5032 *p_elt_count += elt_count;
5037 /* Examine CTOR to discover:
5038 * how many scalar fields are set to nonzero values,
5039 and place it in *P_NZ_ELTS;
5040 * how many scalar fields in total are in CTOR,
5041 and place it in *P_ELT_COUNT.
5042 * if a type is a union, and the initializer from the constructor
5043 is not the largest element in the union, then set *p_must_clear.
5045 Return whether or not CTOR is a valid static constant initializer, the same
5046 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5049 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5050 HOST_WIDE_INT *p_elt_count,
5055 *p_must_clear = false;
5058 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5061 /* Count the number of scalars in TYPE.  Return -1 on overflow or
5062 if TYPE is variable-sized.  If ALLOW_FLEXARR is true, don't count a
5063 flexible array member at the end of the structure.  */
5066 count_type_elements (const_tree type, bool allow_flexarr)
5068 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5069 switch (TREE_CODE (type))
5073 tree telts = array_type_nelts (type);
5074 if (telts && host_integerp (telts, 1))
5076 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5077 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5080 else if (max / n > m)
5088 HOST_WIDE_INT n = 0, t;
5089 tree f;
5091 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5092 if (TREE_CODE (f) == FIELD_DECL)
5094 t = count_type_elements (TREE_TYPE (f), false);
5097 /* Check for structures with flexible array member. */
5098 tree tf = TREE_TYPE (f);
5100 && DECL_CHAIN (f) == NULL
5101 && TREE_CODE (tf) == ARRAY_TYPE
5103 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5104 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5105 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5106 && int_size_in_bytes (type) >= 0)
5118 case QUAL_UNION_TYPE:
5125 return TYPE_VECTOR_SUBPARTS (type);
5129 case FIXED_POINT_TYPE:
5134 case REFERENCE_TYPE:
5149 /* Return 1 if EXP contains mostly (3/4) zeros. */
5152 mostly_zeros_p (const_tree exp)
5154 if (TREE_CODE (exp) == CONSTRUCTOR)
5157 HOST_WIDE_INT nz_elts, count, elts;
5160 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5164 elts = count_type_elements (TREE_TYPE (exp), false);
5166 return nz_elts < elts / 4;
5169 return initializer_zerop (exp);
5172 /* Return 1 if EXP contains all zeros. */
5175 all_zeros_p (const_tree exp)
5177 if (TREE_CODE (exp) == CONSTRUCTOR)
5180 HOST_WIDE_INT nz_elts, count;
5183 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5184 return nz_elts == 0;
5187 return initializer_zerop (exp);
5190 /* Helper function for store_constructor.
5191 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5192 TYPE is the type of the CONSTRUCTOR, not the element type.
5193 CLEARED is as for store_constructor.
5194 ALIAS_SET is the alias set to use for any stores.
5196 This provides a recursive shortcut back to store_constructor when it isn't
5197 necessary to go through store_field. This is so that we can pass through
5198 the cleared field to let store_constructor know that we may not have to
5199 clear a substructure if the outer structure has already been cleared. */
5202 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5203 HOST_WIDE_INT bitpos, enum machine_mode mode,
5204 tree exp, tree type, int cleared,
5205 alias_set_type alias_set)
5207 if (TREE_CODE (exp) == CONSTRUCTOR
5208 /* We can only call store_constructor recursively if the size and
5209 bit position are on a byte boundary. */
5210 && bitpos % BITS_PER_UNIT == 0
5211 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5212 /* If we have a nonzero bitpos for a register target, then we just
5213 let store_field do the bitfield handling. This is unlikely to
5214 generate unnecessary clear instructions anyways. */
5215 && (bitpos == 0 || MEM_P (target)))
5219 = adjust_address (target,
5220 GET_MODE (target) == BLKmode
5222 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5223 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5226 /* Update the alias set, if required. */
5227 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5228 && MEM_ALIAS_SET (target) != 0)
5230 target = copy_rtx (target);
5231 set_mem_alias_set (target, alias_set);
5234 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5237 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5240 /* Store the value of constructor EXP into the rtx TARGET.
5241 TARGET is either a REG or a MEM; we know it cannot conflict, since
5242 safe_from_p has been called.
5243 CLEARED is true if TARGET is known to have been zero'd.
5244 SIZE is the number of bytes of TARGET we are allowed to modify: this
5245 may not be the same as the size of EXP if we are assigning to a field
5246 which has been packed to exclude padding bits. */
5249 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5251 tree type = TREE_TYPE (exp);
5252 #ifdef WORD_REGISTER_OPERATIONS
5253 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5256 switch (TREE_CODE (type))
5260 case QUAL_UNION_TYPE:
5262 unsigned HOST_WIDE_INT idx;
5265 /* If size is zero or the target is already cleared, do nothing. */
5266 if (size == 0 || cleared)
5268 /* We either clear the aggregate or indicate the value is dead. */
5269 else if ((TREE_CODE (type) == UNION_TYPE
5270 || TREE_CODE (type) == QUAL_UNION_TYPE)
5271 && ! CONSTRUCTOR_ELTS (exp))
5272 /* If the constructor is empty, clear the union. */
5274 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5278 /* If we are building a static constructor into a register,
5279 set the initial value as zero so we can fold the value into
5280 a constant. But if more than one register is involved,
5281 this probably loses. */
5282 else if (REG_P (target) && TREE_STATIC (exp)
5283 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5285 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5289 /* If the constructor has fewer fields than the structure or
5290 if we are initializing the structure to mostly zeros, clear
5291 the whole structure first. Don't do this if TARGET is a
5292 register whose mode size isn't equal to SIZE since
5293 clear_storage can't handle this case. */
5294 else if (size > 0
5295 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5296 != fields_length (type))
5297 || mostly_zeros_p (exp))
5298 && (!REG_P (target)
5299 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5300 == size)))
5302 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5306 if (REG_P (target) && !cleared)
5307 emit_clobber (target);
5309 /* Store each element of the constructor into the
5310 corresponding field of TARGET. */
5311 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5313 enum machine_mode mode;
5314 HOST_WIDE_INT bitsize;
5315 HOST_WIDE_INT bitpos = 0;
5317 rtx to_rtx = target;
5319 /* Just ignore missing fields. We cleared the whole
5320 structure, above, if any fields are missing. */
5324 if (cleared && initializer_zerop (value))
5327 if (host_integerp (DECL_SIZE (field), 1))
5328 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5332 mode = DECL_MODE (field);
5333 if (DECL_BIT_FIELD (field))
5336 offset = DECL_FIELD_OFFSET (field);
5337 if (host_integerp (offset, 0)
5338 && host_integerp (bit_position (field), 0))
5340 bitpos = int_bit_position (field);
5344 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5348 enum machine_mode address_mode;
5352 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5353 make_tree (TREE_TYPE (exp),
5356 offset_rtx = expand_normal (offset);
5357 gcc_assert (MEM_P (to_rtx));
5360 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5361 if (GET_MODE (offset_rtx) != address_mode)
5362 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5364 to_rtx = offset_address (to_rtx, offset_rtx,
5365 highest_pow2_factor (offset));
5368 #ifdef WORD_REGISTER_OPERATIONS
5369 /* If this initializes a field that is smaller than a
5370 word, at the start of a word, try to widen it to a full
5371 word. This special case allows us to output C++ member
5372 function initializations in a form that the optimizers
5373 can understand.  */
5374 if (REG_P (target)
5375 && bitsize < BITS_PER_WORD
5376 && bitpos % BITS_PER_WORD == 0
5377 && GET_MODE_CLASS (mode) == MODE_INT
5378 && TREE_CODE (value) == INTEGER_CST
5380 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5382 tree type = TREE_TYPE (value);
5384 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5386 type = lang_hooks.types.type_for_size
5387 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5388 value = fold_convert (type, value);
5391 if (BYTES_BIG_ENDIAN)
5393 = fold_build2 (LSHIFT_EXPR, type, value,
5394 build_int_cst (type,
5395 BITS_PER_WORD - bitsize));
5396 bitsize = BITS_PER_WORD;
5401 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5402 && DECL_NONADDRESSABLE_P (field))
5404 to_rtx = copy_rtx (to_rtx);
5405 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5408 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5409 value, type, cleared,
5410 get_alias_set (TREE_TYPE (field)));
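/* E.g. (illustrative) under WORD_REGISTER_OPERATIONS, an initializer
   such as

       struct X { short a, b; } x = { 1, 2 };

   expanded into a register target can have the constant for the
   word-aligned field A widened to a full-word store, which the RTL
   optimizers combine more easily than a bit-field insertion.  */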
5417 unsigned HOST_WIDE_INT i;
5420 tree elttype = TREE_TYPE (type);
5422 HOST_WIDE_INT minelt = 0;
5423 HOST_WIDE_INT maxelt = 0;
5425 domain = TYPE_DOMAIN (type);
5426 const_bounds_p = (TYPE_MIN_VALUE (domain)
5427 && TYPE_MAX_VALUE (domain)
5428 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5429 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5431 /* If we have constant bounds for the range of the type, get them. */
5434 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5435 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5438 /* If the constructor has fewer elements than the array, clear
5439 the whole array first.  Similarly if this is a static
5440 constructor of a non-BLKmode object.  */
5441 if (cleared)
5442 need_to_clear = 0;
5443 else if (REG_P (target) && TREE_STATIC (exp))
5444 need_to_clear = 1;
5447 unsigned HOST_WIDE_INT idx;
5449 HOST_WIDE_INT count = 0, zero_count = 0;
5450 need_to_clear = ! const_bounds_p;
5452 /* This loop is a more accurate version of the loop in
5453 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5454 is also needed to check for missing elements. */
5455 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5457 HOST_WIDE_INT this_node_count;
5462 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5464 tree lo_index = TREE_OPERAND (index, 0);
5465 tree hi_index = TREE_OPERAND (index, 1);
5467 if (! host_integerp (lo_index, 1)
5468 || ! host_integerp (hi_index, 1))
5474 this_node_count = (tree_low_cst (hi_index, 1)
5475 - tree_low_cst (lo_index, 1) + 1);
5478 this_node_count = 1;
5480 count += this_node_count;
5481 if (mostly_zeros_p (value))
5482 zero_count += this_node_count;
5485 /* Clear the entire array first if there are any missing
5486 elements, or if the incidence of zero elements is >=
5487 75%.  */
5488 if (! need_to_clear
5489 && (count < maxelt - minelt + 1
5490 || 4 * zero_count >= 3 * count))
5491 need_to_clear = 1;
5494 if (need_to_clear && size > 0)
5495 {
5496 if (REG_P (target))
5497 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5498 else
5499 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5503 if (!cleared && REG_P (target))
5504 /* Inform later passes that the old value is dead. */
5505 emit_clobber (target);
5507 /* Store each element of the constructor into the
5508 corresponding element of TARGET, determined by counting the
5510 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5512 enum machine_mode mode;
5513 HOST_WIDE_INT bitsize;
5514 HOST_WIDE_INT bitpos;
5515 rtx xtarget = target;
5517 if (cleared && initializer_zerop (value))
5520 mode = TYPE_MODE (elttype);
5521 if (mode == BLKmode)
5522 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5523 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5524 : -1);
5525 else
5526 bitsize = GET_MODE_BITSIZE (mode);
5528 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5530 tree lo_index = TREE_OPERAND (index, 0);
5531 tree hi_index = TREE_OPERAND (index, 1);
5532 rtx index_r, pos_rtx;
5533 HOST_WIDE_INT lo, hi, count;
5536 /* If the range is constant and "small", unroll the loop. */
5538 && host_integerp (lo_index, 0)
5539 && host_integerp (hi_index, 0)
5540 && (lo = tree_low_cst (lo_index, 0),
5541 hi = tree_low_cst (hi_index, 0),
5542 count = hi - lo + 1,
5545 || (host_integerp (TYPE_SIZE (elttype), 1)
5546 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5549 lo -= minelt; hi -= minelt;
5550 for (; lo <= hi; lo++)
5552 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5555 && !MEM_KEEP_ALIAS_SET_P (target)
5556 && TREE_CODE (type) == ARRAY_TYPE
5557 && TYPE_NONALIASED_COMPONENT (type))
5559 target = copy_rtx (target);
5560 MEM_KEEP_ALIAS_SET_P (target) = 1;
5563 store_constructor_field
5564 (target, bitsize, bitpos, mode, value, type, cleared,
5565 get_alias_set (elttype));
5570 rtx loop_start = gen_label_rtx ();
5571 rtx loop_end = gen_label_rtx ();
5574 expand_normal (hi_index);
5576 index = build_decl (EXPR_LOCATION (exp),
5577 VAR_DECL, NULL_TREE, domain);
5578 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5579 SET_DECL_RTL (index, index_r);
5580 store_expr (lo_index, index_r, 0, false);
5582 /* Build the head of the loop. */
5583 do_pending_stack_adjust ();
5584 emit_label (loop_start);
5586 /* Assign value to element index.  */
5587 position =
5588 fold_convert (ssizetype,
5589 fold_build2 (MINUS_EXPR,
5590 TREE_TYPE (index),
5591 index,
5592 TYPE_MIN_VALUE (domain)));
5594 position =
5595 size_binop (MULT_EXPR, position,
5596 fold_convert (ssizetype,
5597 TYPE_SIZE_UNIT (elttype)));
5599 pos_rtx = expand_normal (position);
5600 xtarget = offset_address (target, pos_rtx,
5601 highest_pow2_factor (position));
5602 xtarget = adjust_address (xtarget, mode, 0);
5603 if (TREE_CODE (value) == CONSTRUCTOR)
5604 store_constructor (value, xtarget, cleared,
5605 bitsize / BITS_PER_UNIT);
5607 store_expr (value, xtarget, 0, false);
5609 /* Generate a conditional jump to exit the loop. */
5610 exit_cond = build2 (LT_EXPR, integer_type_node,
5611 index, hi_index);
5612 jumpif (exit_cond, loop_end, -1);
5614 /* Update the loop counter, and jump to the head of
5616 expand_assignment (index,
5617 build2 (PLUS_EXPR, TREE_TYPE (index),
5618 index, integer_one_node),
5619 false);
5621 emit_jump (loop_start);
5623 /* Build the end of the loop. */
5624 emit_label (loop_end);
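/* The loop built above handles GNU range initializers whose bounds are
   not small compile-time constants, e.g. (illustrative)

       int a[1000] = { [10 ... 900] = 5 };

   whereas small constant ranges are unrolled into individual stores by
   the branch before this one.  */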
5627 else if ((index != 0 && ! host_integerp (index, 0))
5628 || ! host_integerp (TYPE_SIZE (elttype), 1))
5632 if (index == 0)
5633 index = ssize_int (1);
5635 if (minelt)
5636 index = fold_convert (ssizetype,
5637 fold_build2 (MINUS_EXPR,
5638 TREE_TYPE (index),
5639 index,
5640 TYPE_MIN_VALUE (domain)));
5642 position =
5643 size_binop (MULT_EXPR, index,
5644 fold_convert (ssizetype,
5645 TYPE_SIZE_UNIT (elttype)));
5646 xtarget = offset_address (target,
5647 expand_normal (position),
5648 highest_pow2_factor (position));
5649 xtarget = adjust_address (xtarget, mode, 0);
5650 store_expr (value, xtarget, 0, false);
5655 bitpos = ((tree_low_cst (index, 0) - minelt)
5656 * tree_low_cst (TYPE_SIZE (elttype), 1));
5658 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5660 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5661 && TREE_CODE (type) == ARRAY_TYPE
5662 && TYPE_NONALIASED_COMPONENT (type))
5664 target = copy_rtx (target);
5665 MEM_KEEP_ALIAS_SET_P (target) = 1;
5667 store_constructor_field (target, bitsize, bitpos, mode, value,
5668 type, cleared, get_alias_set (elttype));
5676 unsigned HOST_WIDE_INT idx;
5677 constructor_elt *ce;
5681 tree elttype = TREE_TYPE (type);
5682 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5683 enum machine_mode eltmode = TYPE_MODE (elttype);
5684 HOST_WIDE_INT bitsize;
5685 HOST_WIDE_INT bitpos;
5686 rtvec vector = NULL;
5688 alias_set_type alias;
5690 gcc_assert (eltmode != BLKmode);
5692 n_elts = TYPE_VECTOR_SUBPARTS (type);
5693 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5695 enum machine_mode mode = GET_MODE (target);
5697 icode = (int) optab_handler (vec_init_optab, mode);
5698 if (icode != CODE_FOR_nothing)
5702 vector = rtvec_alloc (n_elts);
5703 for (i = 0; i < n_elts; i++)
5704 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5708 /* If the constructor has fewer elements than the vector,
5709 clear the whole array first.  Similarly if this is a static
5710 constructor of a non-BLKmode object.  */
5711 if (cleared)
5712 need_to_clear = 0;
5713 else if (REG_P (target) && TREE_STATIC (exp))
5714 need_to_clear = 1;
5717 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5720 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5722 int n_elts_here = tree_low_cst
5723 (int_const_binop (TRUNC_DIV_EXPR,
5724 TYPE_SIZE (TREE_TYPE (value)),
5725 TYPE_SIZE (elttype), 0), 1);
5727 count += n_elts_here;
5728 if (mostly_zeros_p (value))
5729 zero_count += n_elts_here;
5732 /* Clear the entire vector first if there are any missing elements,
5733 or if the incidence of zero elements is >= 75%. */
5734 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5737 if (need_to_clear && size > 0 && !vector)
5738 {
5739 if (REG_P (target))
5740 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5741 else
5742 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5746 /* Inform later passes that the old value is dead. */
5747 if (!cleared && !vector && REG_P (target))
5748 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5751 alias = MEM_ALIAS_SET (target);
5753 alias = get_alias_set (elttype);
5755 /* Store each element of the constructor into the corresponding
5756 element of TARGET, determined by counting the elements. */
5757 for (idx = 0, i = 0;
5758 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5759 idx++, i += bitsize / elt_size)
5761 HOST_WIDE_INT eltpos;
5762 tree value = ce->value;
5764 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5765 if (cleared && initializer_zerop (value))
5769 eltpos = tree_low_cst (ce->index, 1);
5775 /* Vector CONSTRUCTORs should only be built from smaller
5776 vectors in the case of BLKmode vectors. */
5777 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5778 RTVEC_ELT (vector, eltpos)
5779 = expand_normal (value);
5783 enum machine_mode value_mode =
5784 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5785 ? TYPE_MODE (TREE_TYPE (value))
5786 : eltmode;
5787 bitpos = eltpos * elt_size;
5788 store_constructor_field (target, bitsize, bitpos,
5789 value_mode, value, type,
5795 emit_insn (GEN_FCN (icode)
5796 (target,
5797 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5806 /* Store the value of EXP (an expression tree)
5807 into a subfield of TARGET which has mode MODE and occupies
5808 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5809 If MODE is VOIDmode, it means that we are storing into a bit-field.
5811 Always return const0_rtx unless we have something particular to
5814 TYPE is the type of the underlying object,
5816 ALIAS_SET is the alias set for the destination. This value will
5817 (in general) be different from that for TARGET, since TARGET is a
5818 reference to the containing structure.
5820 If NONTEMPORAL is true, try generating a nontemporal store. */
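/* For instance (illustrative, assuming a typical bit-field layout),
   expanding

       struct S { int a : 5; int b : 27; } s;
       ...
       s.a = -1;

   reaches store_field with BITSIZE == 5, BITPOS == 0 and MODE ==
   VOIDmode, so the value is inserted with store_bit_field rather than
   with an ordinary move.  */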
5823 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5824 enum machine_mode mode, tree exp, tree type,
5825 alias_set_type alias_set, bool nontemporal)
5827 if (TREE_CODE (exp) == ERROR_MARK)
5831 /* If we have nothing to store, do nothing unless the expression has
5832 side-effects.  */
5833 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5835 /* If we are storing into an unaligned field of an aligned union that is
5836 in a register, we may have the mode of TARGET being an integer mode but
5837 MODE == BLKmode. In that case, get an aligned object whose size and
5838 alignment are the same as TARGET and store TARGET into it (we can avoid
5839 the store if the field being stored is the entire width of TARGET). Then
5840 call ourselves recursively to store the field into a BLKmode version of
5841 that object. Finally, load from the object into TARGET. This is not
5842 very efficient in general, but should only be slightly more expensive
5843 than the otherwise-required unaligned accesses. Perhaps this can be
5844 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5845 twice, once with emit_move_insn and once via store_field. */
5848 && (REG_P (target) || GET_CODE (target) == SUBREG))
5850 rtx object = assign_temp (type, 0, 1, 1);
5851 rtx blk_object = adjust_address (object, BLKmode, 0);
5853 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5854 emit_move_insn (object, target);
5856 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5859 emit_move_insn (target, object);
5861 /* We want to return the BLKmode version of the data. */
5865 if (GET_CODE (target) == CONCAT)
5867 /* We're storing into a struct containing a single __complex. */
5869 gcc_assert (!bitpos);
5870 return store_expr (exp, target, 0, nontemporal);
5873 /* If the structure is in a register or if the component
5874 is a bit field, we cannot use addressing to access it.
5875 Use bit-field techniques or SUBREG to store in it. */
5877 if (mode == VOIDmode
5878 || (mode != BLKmode && ! direct_store[(int) mode]
5879 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5880 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5881 || REG_P (target)
5882 || GET_CODE (target) == SUBREG
5883 /* If the field isn't aligned enough to store as an ordinary memref,
5884 store it as a bit field.  */
5885 || (mode != BLKmode
5886 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5887 || bitpos % GET_MODE_ALIGNMENT (mode))
5888 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5889 || (bitpos % BITS_PER_UNIT != 0)))
5890 /* If the RHS and field are a constant size and the size of the
5891 RHS isn't the same size as the bitfield, we must use bitfield
5892 operations.  */
5893 || (bitsize >= 0
5894 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5895 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5896 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5897 decl we must use bitfield operations.  */
5898 || (bitsize >= 0
5899 && TREE_CODE (exp) == MEM_REF
5900 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5901 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5902 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
5903 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5908 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5909 implies a mask operation. If the precision is the same size as
5910 the field we're storing into, that mask is redundant. This is
5911 particularly common with bit field assignments generated by the
5912 C++ front end.  */
5913 nop_def = get_def_for_expr (exp, NOP_EXPR);
5916 tree type = TREE_TYPE (exp);
5917 if (INTEGRAL_TYPE_P (type)
5918 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5919 && bitsize == TYPE_PRECISION (type))
5921 tree op = gimple_assign_rhs1 (nop_def);
5922 type = TREE_TYPE (op);
5923 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5928 temp = expand_normal (exp);
5930 /* If BITSIZE is narrower than the size of the type of EXP
5931 we will be narrowing TEMP. Normally, what's wanted are the
5932 low-order bits.  However, if EXP's type is a record and this is a
5933 big-endian machine, we want the upper BITSIZE bits.  */
5934 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5935 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5936 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5937 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5938 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5939 - bitsize),
5940 NULL_RTX, 1);
5942 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5943 MODE.  */
5944 if (mode != VOIDmode && mode != BLKmode
5945 && mode != TYPE_MODE (TREE_TYPE (exp)))
5946 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5948 /* If the modes of TEMP and TARGET are both BLKmode, both
5949 must be in memory and BITPOS must be aligned on a byte
5950 boundary. If so, we simply do a block copy. Likewise
5951 for a BLKmode-like TARGET. */
5952 if (GET_MODE (temp) == BLKmode
5953 && (GET_MODE (target) == BLKmode
5955 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5956 && (bitpos % BITS_PER_UNIT) == 0
5957 && (bitsize % BITS_PER_UNIT) == 0)))
5959 gcc_assert (MEM_P (target) && MEM_P (temp)
5960 && (bitpos % BITS_PER_UNIT) == 0);
5962 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5963 emit_block_move (target, temp,
5964 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5971 /* Store the value in the bitfield. */
5972 store_bit_field (target, bitsize, bitpos, mode, temp);
5978 /* Now build a reference to just the desired component. */
5979 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5981 if (to_rtx == target)
5982 to_rtx = copy_rtx (to_rtx);
5984 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5985 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5986 set_mem_alias_set (to_rtx, alias_set);
5988 return store_expr (exp, to_rtx, 0, nontemporal);
5992 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5993 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5994 codes and find the ultimate containing object, which we return.
5996 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5997 bit position, and *PUNSIGNEDP to the signedness of the field.
5998 If the position of the field is variable, we store a tree
5999 giving the variable offset (in units) in *POFFSET.
6000 This offset is in addition to the bit position.
6001 If the position is not variable, we store 0 in *POFFSET.
6003 If any of the extraction expressions is volatile,
6004 we store 1 in *PVOLATILEP. Otherwise we don't change that.
6006 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6007 Otherwise, it is a mode that can be used to access the field.
6009 If the field describes a variable-sized object, *PMODE is set to
6010 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
6011 this case, but the address of the object can be found.
6013 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6014 look through nodes that serve as markers of a greater alignment than
6015 the one that can be deduced from the expression. These nodes make it
6016 possible for front-ends to prevent temporaries from being created by
6017 the middle-end on alignment considerations. For that purpose, the
6018 normal operating mode at high-level is to always pass FALSE so that
6019 the ultimate containing object is really returned; moreover, the
6020 associated predicate handled_component_p will always return TRUE
6021 on these nodes, thus indicating that they are essentially handled
6022 by get_inner_reference. TRUE should only be passed when the caller
6023 is scanning the expression in order to build another representation
6024 and specifically knows how to handle these nodes; as such, this is
6025 the normal operating mode in the RTL expanders. */
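/* As an example (illustrative, assuming a typical layout), for

       struct S { int i; struct { char c; int f : 3; } in; } s;

   applying get_inner_reference to the COMPONENT_REF s.in.f hands back
   the decl for S, with *PBITSIZE == 3, *PBITPOS the accumulated
   constant bit offset of F, *POFFSET == NULL_TREE and
   *PMODE == VOIDmode, F being a non-BLKmode bit-field.  */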
6028 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6029 HOST_WIDE_INT *pbitpos, tree *poffset,
6030 enum machine_mode *pmode, int *punsignedp,
6031 int *pvolatilep, bool keep_aligning)
6034 enum machine_mode mode = VOIDmode;
6035 bool blkmode_bitfield = false;
6036 tree offset = size_zero_node;
6037 double_int bit_offset = double_int_zero;
6039 /* First get the mode, signedness, and size. We do this from just the
6040 outermost expression. */
6042 if (TREE_CODE (exp) == COMPONENT_REF)
6044 tree field = TREE_OPERAND (exp, 1);
6045 size_tree = DECL_SIZE (field);
6046 if (!DECL_BIT_FIELD (field))
6047 mode = DECL_MODE (field);
6048 else if (DECL_MODE (field) == BLKmode)
6049 blkmode_bitfield = true;
6050 else if (TREE_THIS_VOLATILE (exp)
6051 && flag_strict_volatile_bitfields > 0)
6052 /* Volatile bitfields should be accessed in the mode of the
6053 field's type, not the mode computed based on the bit
6054 field size.  */
6055 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6057 *punsignedp = DECL_UNSIGNED (field);
6059 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6061 size_tree = TREE_OPERAND (exp, 1);
6062 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6063 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6065 /* For vector types, with the correct size of access, use the mode of
6066 the vector type.  */
6067 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6068 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6069 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6070 mode = TYPE_MODE (TREE_TYPE (exp));
6074 mode = TYPE_MODE (TREE_TYPE (exp));
6075 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6077 if (mode == BLKmode)
6078 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6080 *pbitsize = GET_MODE_BITSIZE (mode);
6085 if (! host_integerp (size_tree, 1))
6086 mode = BLKmode, *pbitsize = -1;
6088 *pbitsize = tree_low_cst (size_tree, 1);
6091 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6092 and find the ultimate containing object. */
6095 switch (TREE_CODE (exp))
6099 = double_int_add (bit_offset,
6100 tree_to_double_int (TREE_OPERAND (exp, 2)));
6105 tree field = TREE_OPERAND (exp, 1);
6106 tree this_offset = component_ref_field_offset (exp);
6108 /* If this field hasn't been filled in yet, don't go past it.
6109 This should only happen when folding expressions made during
6110 type construction. */
6111 if (this_offset == 0)
6114 offset = size_binop (PLUS_EXPR, offset, this_offset);
6115 bit_offset = double_int_add (bit_offset,
6117 (DECL_FIELD_BIT_OFFSET (field)));
6119 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6124 case ARRAY_RANGE_REF:
6126 tree index = TREE_OPERAND (exp, 1);
6127 tree low_bound = array_ref_low_bound (exp);
6128 tree unit_size = array_ref_element_size (exp);
6130 /* We assume all arrays have sizes that are a multiple of a byte.
6131 First subtract the lower bound, if any, in the type of the
6132 index, then convert to sizetype and multiply by the size of
6133 the array element. */
6134 if (! integer_zerop (low_bound))
6135 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6138 offset = size_binop (PLUS_EXPR, offset,
6139 size_binop (MULT_EXPR,
6140 fold_convert (sizetype, index),
6149 bit_offset = double_int_add (bit_offset,
6150 uhwi_to_double_int (*pbitsize));
6153 case VIEW_CONVERT_EXPR:
6154 if (keep_aligning && STRICT_ALIGNMENT
6155 && (TYPE_ALIGN (TREE_TYPE (exp))
6156 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6157 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6158 < BIGGEST_ALIGNMENT)
6159 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6160 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6165 /* Hand back the decl for MEM[&decl, off]. */
6166 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6168 tree off = TREE_OPERAND (exp, 1);
6169 if (!integer_zerop (off))
6171 double_int boff, coff = mem_ref_offset (exp);
6172 boff = double_int_lshift (coff,
6174 ? 3 : exact_log2 (BITS_PER_UNIT),
6175 HOST_BITS_PER_DOUBLE_INT, true);
6176 bit_offset = double_int_add (bit_offset, boff);
6178 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6186 /* If any reference in the chain is volatile, the effect is volatile. */
6187 if (TREE_THIS_VOLATILE (exp))
6190 exp = TREE_OPERAND (exp, 0);
6194 /* If OFFSET is constant, see if we can return the whole thing as a
6195 constant bit position.  Make sure to handle overflow during
6196 this conversion.  */
6197 if (host_integerp (offset, 0))
6199 double_int tem = double_int_lshift (tree_to_double_int (offset),
6201 ? 3 : exact_log2 (BITS_PER_UNIT),
6202 HOST_BITS_PER_DOUBLE_INT, true);
6203 tem = double_int_add (tem, bit_offset);
6204 if (double_int_fits_in_shwi_p (tem))
6206 *pbitpos = double_int_to_shwi (tem);
6207 *poffset = offset = NULL_TREE;
6211 /* Otherwise, split it up. */
6214 *pbitpos = double_int_to_shwi (bit_offset);
6218 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6219 if (mode == VOIDmode
6221 && (*pbitpos % BITS_PER_UNIT) == 0
6222 && (*pbitsize % BITS_PER_UNIT) == 0)
6230 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6231 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6232 EXP is marked as PACKED. */
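/* E.g. (illustrative) given

       struct __attribute__((packed)) P { char c; int i; } p;

   contains_packed_reference is true for the COMPONENT_REF p.i, and for
   any ARRAY_REF or COMPONENT_REF nested inside such a packed object.  */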
6235 contains_packed_reference (const_tree exp)
6237 bool packed_p = false;
6241 switch (TREE_CODE (exp))
6245 tree field = TREE_OPERAND (exp, 1);
6246 packed_p = DECL_PACKED (field)
6247 || TYPE_PACKED (TREE_TYPE (field))
6248 || TYPE_PACKED (TREE_TYPE (exp));
6256 case ARRAY_RANGE_REF:
6259 case VIEW_CONVERT_EXPR:
6265 exp = TREE_OPERAND (exp, 0);
6271 /* Return a tree of sizetype representing the size, in bytes, of the element
6272 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6275 array_ref_element_size (tree exp)
6277 tree aligned_size = TREE_OPERAND (exp, 3);
6278 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6279 location_t loc = EXPR_LOCATION (exp);
6281 /* If a size was specified in the ARRAY_REF, it's the size measured
6282 in alignment units of the element type. So multiply by that value. */
6285 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6286 sizetype from another type of the same width and signedness. */
6287 if (TREE_TYPE (aligned_size) != sizetype)
6288 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6289 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6290 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6293 /* Otherwise, take the size from that of the element type. Substitute
6294 any PLACEHOLDER_EXPR that we have. */
6296 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6299 /* Return a tree representing the lower bound of the array mentioned in
6300 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6303 array_ref_low_bound (tree exp)
6305 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6307 /* If a lower bound is specified in EXP, use it. */
6308 if (TREE_OPERAND (exp, 2))
6309 return TREE_OPERAND (exp, 2);
6311 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6312 substituting for a PLACEHOLDER_EXPR as needed. */
6313 if (domain_type && TYPE_MIN_VALUE (domain_type))
6314 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6316 /* Otherwise, return a zero of the appropriate type. */
6317 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6320 /* Return a tree representing the upper bound of the array mentioned in
6321 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6324 array_ref_up_bound (tree exp)
6326 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6328 /* If there is a domain type and it has an upper bound, use it, substituting
6329 for a PLACEHOLDER_EXPR as needed. */
6330 if (domain_type && TYPE_MAX_VALUE (domain_type))
6331 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6333 /* Otherwise fail. */
6337 /* Return a tree representing the offset, in bytes, of the field referenced
6338 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6341 component_ref_field_offset (tree exp)
6343 tree aligned_offset = TREE_OPERAND (exp, 2);
6344 tree field = TREE_OPERAND (exp, 1);
6345 location_t loc = EXPR_LOCATION (exp);
6347 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6348 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6352 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6353 sizetype from another type of the same width and signedness. */
6354 if (TREE_TYPE (aligned_offset) != sizetype)
6355 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6356 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6357 size_int (DECL_OFFSET_ALIGN (field)
6361 /* Otherwise, take the offset from that of the field. Substitute
6362 any PLACEHOLDER_EXPR that we have. */
6364 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6367 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6369 static unsigned HOST_WIDE_INT
6370 target_align (const_tree target)
6372 /* We might have a chain of nested references with intermediate misaligning
6373 bitfields components, so need to recurse to find out. */
6375 unsigned HOST_WIDE_INT this_align, outer_align;
6377 switch (TREE_CODE (target))
6383 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6384 outer_align = target_align (TREE_OPERAND (target, 0));
6385 return MIN (this_align, outer_align);
6388 case ARRAY_RANGE_REF:
6389 this_align = TYPE_ALIGN (TREE_TYPE (target));
6390 outer_align = target_align (TREE_OPERAND (target, 0));
6391 return MIN (this_align, outer_align);
6394 case NON_LVALUE_EXPR:
6395 case VIEW_CONVERT_EXPR:
6396 this_align = TYPE_ALIGN (TREE_TYPE (target));
6397 outer_align = target_align (TREE_OPERAND (target, 0));
6398 return MAX (this_align, outer_align);
6401 return TYPE_ALIGN (TREE_TYPE (target));
6406 /* Given an rtx VALUE that may contain additions and multiplications, return
6407 an equivalent value that just refers to a register, memory, or constant.
6408 This is done by generating instructions to perform the arithmetic and
6409 returning a pseudo-register containing the value.
6411 The returned value may be a REG, SUBREG, MEM or constant. */
6414 force_operand (rtx value, rtx target)
6417 /* Use subtarget as the target for operand 0 of a binary operation. */
6418 rtx subtarget = get_subtarget (target);
6419 enum rtx_code code = GET_CODE (value);
6421 /* Check for subreg applied to an expression produced by loop optimizer. */
6423 && !REG_P (SUBREG_REG (value))
6424 && !MEM_P (SUBREG_REG (value)))
6427 = simplify_gen_subreg (GET_MODE (value),
6428 force_reg (GET_MODE (SUBREG_REG (value)),
6429 force_operand (SUBREG_REG (value),
6431 GET_MODE (SUBREG_REG (value)),
6432 SUBREG_BYTE (value));
6433 code = GET_CODE (value);
6436 /* Check for a PIC address load. */
6437 if ((code == PLUS || code == MINUS)
6438 && XEXP (value, 0) == pic_offset_table_rtx
6439 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6440 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6441 || GET_CODE (XEXP (value, 1)) == CONST))
6444 subtarget = gen_reg_rtx (GET_MODE (value));
6445 emit_move_insn (subtarget, value);
6449 if (ARITHMETIC_P (value))
6451 op2 = XEXP (value, 1);
6452 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6454 if (code == MINUS && CONST_INT_P (op2))
6457 op2 = negate_rtx (GET_MODE (value), op2);
6460 /* Check for an addition with OP2 a constant integer and our first
6461 operand a PLUS of a virtual register and something else. In that
6462 case, we want to emit the sum of the virtual register and the
6463 constant first and then add the other value. This allows virtual
6464 register instantiation to simply modify the constant rather than
6465 creating another one around this addition. */
6466 if (code == PLUS && CONST_INT_P (op2)
6467 && GET_CODE (XEXP (value, 0)) == PLUS
6468 && REG_P (XEXP (XEXP (value, 0), 0))
6469 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6470 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6472 rtx temp = expand_simple_binop (GET_MODE (value), code,
6473 XEXP (XEXP (value, 0), 0), op2,
6474 subtarget, 0, OPTAB_LIB_WIDEN);
6475 return expand_simple_binop (GET_MODE (value), code, temp,
6476 force_operand (XEXP (XEXP (value,
6478 target, 0, OPTAB_LIB_WIDEN);
6481 op1 = force_operand (XEXP (value, 0), subtarget);
6482 op2 = force_operand (op2, NULL_RTX);
6486 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6488 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6489 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6490 target, 1, OPTAB_LIB_WIDEN);
6492 return expand_divmod (0,
6493 FLOAT_MODE_P (GET_MODE (value))
6494 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6495 GET_MODE (value), op1, op2, target, 0);
6497 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6500 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6503 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6506 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6507 target, 0, OPTAB_LIB_WIDEN);
6509 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6510 target, 1, OPTAB_LIB_WIDEN);
6513 if (UNARY_P (value))
6516 target = gen_reg_rtx (GET_MODE (value));
6517 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6524 case FLOAT_TRUNCATE:
6525 convert_move (target, op1, code == ZERO_EXTEND);
6530 expand_fix (target, op1, code == UNSIGNED_FIX);
6534 case UNSIGNED_FLOAT:
6535 expand_float (target, op1, code == UNSIGNED_FLOAT);
6539 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6543 #ifdef INSN_SCHEDULING
6544 /* On machines that have insn scheduling, we want all memory reference to be
6545 explicit, so we need to deal with such paradoxical SUBREGs. */
6546 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6547 && (GET_MODE_SIZE (GET_MODE (value))
6548 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6550 = simplify_gen_subreg (GET_MODE (value),
6551 force_reg (GET_MODE (SUBREG_REG (value)),
6552 force_operand (SUBREG_REG (value),
6554 GET_MODE (SUBREG_REG (value)),
6555 SUBREG_BYTE (value));
6561 /* Subroutine of expand_expr: return nonzero iff there is no way that
6562 EXP can reference X, which is being modified. TOP_P is nonzero if this
6563 call is going to be used to determine whether we need a temporary
6564 for EXP, as opposed to a recursive call to this function.
6566 It is always safe for this routine to return zero since it merely
6567 searches for optimization opportunities. */
6570 safe_from_p (const_rtx x, tree exp, int top_p)
6576 /* If EXP has varying size, we MUST use a target since we currently
6577 have no way of allocating temporaries of variable size
6578 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6579 So we assume here that something at a higher level has prevented a
6580 clash. This is somewhat bogus, but the best we can do. Only
6581 do this when X is BLKmode and when we are at the top level. */
6582 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6583 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6584 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6585 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6586 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6588 && GET_MODE (x) == BLKmode)
6589 /* If X is in the outgoing argument area, it is always safe. */
6591 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6592 || (GET_CODE (XEXP (x, 0)) == PLUS
6593 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6596 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6597 find the underlying pseudo. */
6598 if (GET_CODE (x) == SUBREG)
6601 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6605 /* Now look at our tree code and possibly recurse. */
6606 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6608 case tcc_declaration:
6609 exp_rtl = DECL_RTL_IF_SET (exp);
6615 case tcc_exceptional:
6616 if (TREE_CODE (exp) == TREE_LIST)
6620 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6622 exp = TREE_CHAIN (exp);
6625 if (TREE_CODE (exp) != TREE_LIST)
6626 return safe_from_p (x, exp, 0);
6629 else if (TREE_CODE (exp) == CONSTRUCTOR)
6631 constructor_elt *ce;
6632 unsigned HOST_WIDE_INT idx;
6634 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
6635 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6636 || !safe_from_p (x, ce->value, 0))
6640 else if (TREE_CODE (exp) == ERROR_MARK)
6641 return 1; /* An already-visited SAVE_EXPR? */
6646 /* The only case we look at here is the DECL_INITIAL inside a
6648 return (TREE_CODE (exp) != DECL_EXPR
6649 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6650 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6651 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6654 case tcc_comparison:
6655 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6660 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6662 case tcc_expression:
6665 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6666 the expression. If it is set, we conflict iff we are that rtx or
6667 both are in memory. Otherwise, we check all operands of the
6668 expression recursively. */
6670 switch (TREE_CODE (exp))
6673 /* If the operand is static or we are static, we can't conflict.
6674 Likewise if we don't conflict with the operand at all. */
6675 if (staticp (TREE_OPERAND (exp, 0))
6676 || TREE_STATIC (exp)
6677 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6680 /* Otherwise, the only way this can conflict is if we are taking
6681 the address of a DECL a that address if part of X, which is
6683 exp = TREE_OPERAND (exp, 0);
6686 if (!DECL_RTL_SET_P (exp)
6687 || !MEM_P (DECL_RTL (exp)))
6690 exp_rtl = XEXP (DECL_RTL (exp), 0);
6696 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6697 get_alias_set (exp)))
6702 /* Assume that the call will clobber all hard registers and
6704 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6709 case WITH_CLEANUP_EXPR:
6710 case CLEANUP_POINT_EXPR:
6711 /* Lowered by gimplify.c. */
6715 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6721 /* If we have an rtx, we do not need to scan our operands. */
6725 nops = TREE_OPERAND_LENGTH (exp);
6726 for (i = 0; i < nops; i++)
6727 if (TREE_OPERAND (exp, i) != 0
6728 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6734 /* Should never get a type here. */
6738 /* If we have an rtl, find any enclosed object. Then see if we conflict
6742 if (GET_CODE (exp_rtl) == SUBREG)
6744 exp_rtl = SUBREG_REG (exp_rtl);
6746 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6750 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6751 are memory and they conflict. */
6752 return ! (rtx_equal_p (x, exp_rtl)
6753 || (MEM_P (x) && MEM_P (exp_rtl)
6754 && true_dependence (exp_rtl, VOIDmode, x,
6755 rtx_addr_varies_p)));
6758 /* If we reach here, it is safe. */
6763 /* Return the highest power of two that EXP is known to be a multiple of.
6764 This is used in updating alignment of MEMs in array references. */
6766 unsigned HOST_WIDE_INT
6767 highest_pow2_factor (const_tree exp)
6769 unsigned HOST_WIDE_INT c0, c1;
6771 switch (TREE_CODE (exp))
6774 /* We can find the lowest bit that's a one. If the low
6775 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6776 We need to handle this case since we can find it in a COND_EXPR,
6777 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6778 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6780 if (TREE_OVERFLOW (exp))
6781 return BIGGEST_ALIGNMENT;
6784 /* Note: tree_low_cst is intentionally not used here,
6785 we don't care about the upper bits. */
6786 c0 = TREE_INT_CST_LOW (exp);
6788 return c0 ? c0 : BIGGEST_ALIGNMENT;
6792 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6793 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6794 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6795 return MIN (c0, c1);
6798 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6799 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6802 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6804 if (integer_pow2p (TREE_OPERAND (exp, 1))
6805 && host_integerp (TREE_OPERAND (exp, 1), 1))
6807 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6808 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6809 return MAX (1, c0 / c1);
6814 /* The highest power of two of a bit-and expression is the maximum of
6815 that of its operands. We typically get here for a complex LHS and
6816 a constant negative power of two on the RHS to force an explicit
6817 alignment, so don't bother looking at the LHS. */
6818 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6822 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6825 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6828 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6829 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6830 return MIN (c0, c1);
6839 /* Similar, except that the alignment requirements of TARGET are
6840 taken into account. Assume it is at least as aligned as its
6841 type, unless it is a COMPONENT_REF in which case the layout of
6842 the structure gives the alignment. */
6844 static unsigned HOST_WIDE_INT
6845 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6847 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6848 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6850 return MAX (factor, talign);
6853 /* Subroutine of expand_expr. Expand the two operands of a binary
6854 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6855 The value may be stored in TARGET if TARGET is nonzero. The
6856 MODIFIER argument is as documented by expand_expr. */
6859 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6860 enum expand_modifier modifier)
6862 if (! safe_from_p (target, exp1, 1))
6864 if (operand_equal_p (exp0, exp1, 0))
6866 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6867 *op1 = copy_rtx (*op0);
6871 /* If we need to preserve evaluation order, copy exp0 into its own
6872 temporary variable so that it can't be clobbered by exp1. */
6873 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6874 exp0 = save_expr (exp0);
6875 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6876 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6881 /* Return a MEM that contains constant EXP. DEFER is as for
6882 output_constant_def and MODIFIER is as for expand_expr. */
6885 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6889 mem = output_constant_def (exp, defer);
6890 if (modifier != EXPAND_INITIALIZER)
6891 mem = use_anchored_address (mem);
6895 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6896 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6899 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6900 enum expand_modifier modifier, addr_space_t as)
6902 rtx result, subtarget;
6904 HOST_WIDE_INT bitsize, bitpos;
6905 int volatilep, unsignedp;
6906 enum machine_mode mode1;
6908 /* If we are taking the address of a constant and are at the top level,
6909 we have to use output_constant_def since we can't call force_const_mem
6911 /* ??? This should be considered a front-end bug. We should not be
6912 generating ADDR_EXPR of something that isn't an LVALUE. The only
6913 exception here is STRING_CST. */
6914 if (CONSTANT_CLASS_P (exp))
6915 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6917 /* Everything must be something allowed by is_gimple_addressable. */
6918 switch (TREE_CODE (exp))
6921 /* This case will happen via recursion for &a->b. */
6922 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6926 tree tem = TREE_OPERAND (exp, 0);
6927 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6928 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6930 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6931 return expand_expr (tem, target, tmode, modifier);
6935 /* Expand the initializer like constants above. */
6936 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6939 /* The real part of the complex number is always first, therefore
6940 the address is the same as the address of the parent object. */
6943 inner = TREE_OPERAND (exp, 0);
6947 /* The imaginary part of the complex number is always second.
6948 The expression is therefore always offset by the size of the
6951 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6952 inner = TREE_OPERAND (exp, 0);
6956 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6957 expand_expr, as that can have various side effects; LABEL_DECLs for
6958 example, may not have their DECL_RTL set yet. Expand the rtl of
6959 CONSTRUCTORs too, which should yield a memory reference for the
6960 constructor's contents. Assume language specific tree nodes can
6961 be expanded in some interesting way. */
6962 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6964 || TREE_CODE (exp) == CONSTRUCTOR
6965 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6967 result = expand_expr (exp, target, tmode,
6968 modifier == EXPAND_INITIALIZER
6969 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6971 /* If the DECL isn't in memory, then the DECL wasn't properly
6972 marked TREE_ADDRESSABLE, which will be either a front-end
6973 or a tree optimizer bug. */
6974 gcc_assert (MEM_P (result));
6975 result = XEXP (result, 0);
6977 /* ??? Is this needed anymore? */
6978 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6980 assemble_external (exp);
6981 TREE_USED (exp) = 1;
6984 if (modifier != EXPAND_INITIALIZER
6985 && modifier != EXPAND_CONST_ADDRESS)
6986 result = force_operand (result, target);
6990 /* Pass FALSE as the last argument to get_inner_reference although
6991 we are expanding to RTL. The rationale is that we know how to
6992 handle "aligning nodes" here: we can just bypass them because
6993 they won't change the final object whose address will be returned
6994 (they actually exist only for that purpose). */
6995 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6996 &mode1, &unsignedp, &volatilep, false);
7000 /* We must have made progress. */
7001 gcc_assert (inner != exp);
7003 subtarget = offset || bitpos ? NULL_RTX : target;
7004 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7005 inner alignment, force the inner to be sufficiently aligned. */
7006 if (CONSTANT_CLASS_P (inner)
7007 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7009 inner = copy_node (inner);
7010 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7011 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7012 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7014 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7020 if (modifier != EXPAND_NORMAL)
7021 result = force_operand (result, NULL);
7022 tmp = expand_expr (offset, NULL_RTX, tmode,
7023 modifier == EXPAND_INITIALIZER
7024 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7026 result = convert_memory_address_addr_space (tmode, result, as);
7027 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7029 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7030 result = gen_rtx_PLUS (tmode, result, tmp);
7033 subtarget = bitpos ? NULL_RTX : target;
7034 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7035 1, OPTAB_LIB_WIDEN);
7041 /* Someone beforehand should have rejected taking the address
7042 of such an object. */
7043 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7045 result = plus_constant (result, bitpos / BITS_PER_UNIT);
7046 if (modifier < EXPAND_SUM)
7047 result = force_operand (result, target);
7053 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7054 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7057 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7058 enum expand_modifier modifier)
7060 addr_space_t as = ADDR_SPACE_GENERIC;
7061 enum machine_mode address_mode = Pmode;
7062 enum machine_mode pointer_mode = ptr_mode;
7063 enum machine_mode rmode;
7066 /* Target mode of VOIDmode says "whatever's natural". */
7067 if (tmode == VOIDmode)
7068 tmode = TYPE_MODE (TREE_TYPE (exp));
7070 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7072 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7073 address_mode = targetm.addr_space.address_mode (as);
7074 pointer_mode = targetm.addr_space.pointer_mode (as);
7077 /* We can get called with some Weird Things if the user does silliness
7078 like "(short) &a". In that case, convert_memory_address won't do
7079 the right thing, so ignore the given target mode. */
7080 if (tmode != address_mode && tmode != pointer_mode)
7081 tmode = address_mode;
7083 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7084 tmode, modifier, as);
7086 /* Despite expand_expr claims concerning ignoring TMODE when not
7087 strictly convenient, stuff breaks if we don't honor it. Note
7088 that combined with the above, we only do this for pointer modes. */
7089 rmode = GET_MODE (result);
7090 if (rmode == VOIDmode)
7093 result = convert_memory_address_addr_space (tmode, result, as);
7098 /* Generate code for computing CONSTRUCTOR EXP.
7099 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7100 is TRUE, instead of creating a temporary variable in memory
7101 NULL is returned and the caller needs to handle it differently. */
7104 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7105 bool avoid_temp_mem)
7107 tree type = TREE_TYPE (exp);
7108 enum machine_mode mode = TYPE_MODE (type);
7110 /* Try to avoid creating a temporary at all. This is possible
7111 if all of the initializer is zero.
7112 FIXME: try to handle all [0..255] initializers we can handle
7114 if (TREE_STATIC (exp)
7115 && !TREE_ADDRESSABLE (exp)
7116 && target != 0 && mode == BLKmode
7117 && all_zeros_p (exp))
7119 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7123 /* All elts simple constants => refer to a constant in memory. But
7124 if this is a non-BLKmode mode, let it store a field at a time
7125 since that should make a CONST_INT or CONST_DOUBLE when we
7126 fold. Likewise, if we have a target we can use, it is best to
7127 store directly into the target unless the type is large enough
7128 that memcpy will be used. If we are making an initializer and
7129 all operands are constant, put it in memory as well.
7131 FIXME: Avoid trying to fill vector constructors piece-meal.
7132 Output them with output_constant_def below unless we're sure
7133 they're zeros. This should go away when vector initializers
7134 are treated like VECTOR_CST instead of arrays. */
7135 if ((TREE_STATIC (exp)
7136 && ((mode == BLKmode
7137 && ! (target != 0 && safe_from_p (target, exp, 1)))
7138 || TREE_ADDRESSABLE (exp)
7139 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7140 && (! MOVE_BY_PIECES_P
7141 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7143 && ! mostly_zeros_p (exp))))
7144 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7145 && TREE_CONSTANT (exp)))
7152 constructor = expand_expr_constant (exp, 1, modifier);
7154 if (modifier != EXPAND_CONST_ADDRESS
7155 && modifier != EXPAND_INITIALIZER
7156 && modifier != EXPAND_SUM)
7157 constructor = validize_mem (constructor);
7162 /* Handle calls that pass values in multiple non-contiguous
7163 locations. The Irix 6 ABI has examples of this. */
7164 if (target == 0 || ! safe_from_p (target, exp, 1)
7165 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7171 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7172 | (TREE_READONLY (exp)
7173 * TYPE_QUAL_CONST))),
7174 0, TREE_ADDRESSABLE (exp), 1);
7177 store_constructor (exp, target, 0, int_expr_size (exp));
7182 /* expand_expr: generate code for computing expression EXP.
7183 An rtx for the computed value is returned. The value is never null.
7184 In the case of a void EXP, const0_rtx is returned.
7186 The value may be stored in TARGET if TARGET is nonzero.
7187 TARGET is just a suggestion; callers must assume that
7188 the rtx returned may not be the same as TARGET.
7190 If TARGET is CONST0_RTX, it means that the value will be ignored.
7192 If TMODE is not VOIDmode, it suggests generating the
7193 result in mode TMODE. But this is done only when convenient.
7194 Otherwise, TMODE is ignored and the value generated in its natural mode.
7195 TMODE is just a suggestion; callers must assume that
7196 the rtx returned may not have mode TMODE.
7198 Note that TARGET may have neither TMODE nor MODE. In that case, it
7199 probably will not be used.
7201 If MODIFIER is EXPAND_SUM then when EXP is an addition
7202 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7203 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7204 products as above, or REG or MEM, or constant.
7205 Ordinarily in such cases we would output mul or add instructions
7206 and then return a pseudo reg containing the sum.
7208 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7209 it also marks a label as absolutely required (it can't be dead).
7210 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7211 This is used for outputting expressions used in initializers.
7213 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7214 with a constant address even if that address is not normally legitimate.
7215 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7217 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7218 a call parameter. Such targets require special care as we haven't yet
7219 marked TARGET so that it's safe from being trashed by libcalls. We
7220 don't want to use TARGET for anything but the final result;
7221 Intermediate values must go elsewhere. Additionally, calls to
7222 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7224 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7225 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7226 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7227 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7231 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7232 enum expand_modifier modifier, rtx *alt_rtl)
7236 /* Handle ERROR_MARK before anybody tries to access its type. */
7237 if (TREE_CODE (exp) == ERROR_MARK
7238 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7240 ret = CONST0_RTX (tmode);
7241 return ret ? ret : const0_rtx;
7244 /* If this is an expression of some kind and it has an associated line
7245 number, then emit the line number before expanding the expression.
7247 We need to save and restore the file and line information so that
7248 errors discovered during expansion are emitted with the right
7249 information. It would be better of the diagnostic routines
7250 used the file/line information embedded in the tree nodes rather
7252 if (cfun && EXPR_HAS_LOCATION (exp))
7254 location_t saved_location = input_location;
7255 location_t saved_curr_loc = get_curr_insn_source_location ();
7256 tree saved_block = get_curr_insn_block ();
7257 input_location = EXPR_LOCATION (exp);
7258 set_curr_insn_source_location (input_location);
7260 /* Record where the insns produced belong. */
7261 set_curr_insn_block (TREE_BLOCK (exp));
7263 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7265 input_location = saved_location;
7266 set_curr_insn_block (saved_block);
7267 set_curr_insn_source_location (saved_curr_loc);
7271 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7278 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7279 enum expand_modifier modifier)
7281 rtx op0, op1, op2, temp;
7284 enum machine_mode mode;
7285 enum tree_code code = ops->code;
7287 rtx subtarget, original_target;
7289 bool reduce_bit_field;
7290 location_t loc = ops->location;
7291 tree treeop0, treeop1;
7292 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7293 ? reduce_to_bit_field_precision ((expr), \
7299 mode = TYPE_MODE (type);
7300 unsignedp = TYPE_UNSIGNED (type);
7305 /* We should be called only on simple (binary or unary) expressions,
7306 exactly those that are valid in gimple expressions that aren't
7307 GIMPLE_SINGLE_RHS (or invalid). */
7308 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7309 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7310 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7312 ignore = (target == const0_rtx
7313 || ((CONVERT_EXPR_CODE_P (code)
7314 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7315 && TREE_CODE (type) == VOID_TYPE));
7317 /* We should be called only if we need the result. */
7318 gcc_assert (!ignore);
7320 /* An operation in what may be a bit-field type needs the
7321 result to be reduced to the precision of the bit-field type,
7322 which is narrower than that of the type's mode. */
7323 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7324 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7326 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7329 /* Use subtarget as the target for operand 0 of a binary operation. */
7330 subtarget = get_subtarget (target);
7331 original_target = target;
7335 case NON_LVALUE_EXPR:
7338 if (treeop0 == error_mark_node)
7341 if (TREE_CODE (type) == UNION_TYPE)
7343 tree valtype = TREE_TYPE (treeop0);
7345 /* If both input and output are BLKmode, this conversion isn't doing
7346 anything except possibly changing memory attribute. */
7347 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7349 rtx result = expand_expr (treeop0, target, tmode,
7352 result = copy_rtx (result);
7353 set_mem_attributes (result, type, 0);
7359 if (TYPE_MODE (type) != BLKmode)
7360 target = gen_reg_rtx (TYPE_MODE (type));
7362 target = assign_temp (type, 0, 1, 1);
7366 /* Store data into beginning of memory target. */
7367 store_expr (treeop0,
7368 adjust_address (target, TYPE_MODE (valtype), 0),
7369 modifier == EXPAND_STACK_PARM,
7374 gcc_assert (REG_P (target));
7376 /* Store this field into a union of the proper type. */
7377 store_field (target,
7378 MIN ((int_size_in_bytes (TREE_TYPE
7381 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7382 0, TYPE_MODE (valtype), treeop0,
7386 /* Return the entire union. */
7390 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7392 op0 = expand_expr (treeop0, target, VOIDmode,
7395 /* If the signedness of the conversion differs and OP0 is
7396 a promoted SUBREG, clear that indication since we now
7397 have to do the proper extension. */
7398 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7399 && GET_CODE (op0) == SUBREG)
7400 SUBREG_PROMOTED_VAR_P (op0) = 0;
7402 return REDUCE_BIT_FIELD (op0);
7405 op0 = expand_expr (treeop0, NULL_RTX, mode,
7406 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7407 if (GET_MODE (op0) == mode)
7410 /* If OP0 is a constant, just convert it into the proper mode. */
7411 else if (CONSTANT_P (op0))
7413 tree inner_type = TREE_TYPE (treeop0);
7414 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7416 if (modifier == EXPAND_INITIALIZER)
7417 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7418 subreg_lowpart_offset (mode,
7421 op0= convert_modes (mode, inner_mode, op0,
7422 TYPE_UNSIGNED (inner_type));
7425 else if (modifier == EXPAND_INITIALIZER)
7426 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7428 else if (target == 0)
7429 op0 = convert_to_mode (mode, op0,
7430 TYPE_UNSIGNED (TREE_TYPE
7434 convert_move (target, op0,
7435 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7439 return REDUCE_BIT_FIELD (op0);
7441 case ADDR_SPACE_CONVERT_EXPR:
7443 tree treeop0_type = TREE_TYPE (treeop0);
7445 addr_space_t as_from;
7447 gcc_assert (POINTER_TYPE_P (type));
7448 gcc_assert (POINTER_TYPE_P (treeop0_type));
7450 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7451 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7453 /* Conversions between pointers to the same address space should
7454 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7455 gcc_assert (as_to != as_from);
7457 /* Ask target code to handle conversion between pointers
7458 to overlapping address spaces. */
7459 if (targetm.addr_space.subset_p (as_to, as_from)
7460 || targetm.addr_space.subset_p (as_from, as_to))
7462 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7463 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7468 /* For disjoint address spaces, converting anything but
7469 a null pointer invokes undefined behaviour. We simply
7470 always return a null pointer here. */
7471 return CONST0_RTX (mode);
7474 case POINTER_PLUS_EXPR:
7475 /* Even though the sizetype mode and the pointer's mode can be different
7476 expand is able to handle this correctly and get the correct result out
7477 of the PLUS_EXPR code. */
7478 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7479 if sizetype precision is smaller than pointer precision. */
7480 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7481 treeop1 = fold_convert_loc (loc, type,
7482 fold_convert_loc (loc, ssizetype,
7485 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7486 something else, make sure we add the register to the constant and
7487 then to the other thing. This case can occur during strength
7488 reduction and doing it this way will produce better code if the
7489 frame pointer or argument pointer is eliminated.
7491 fold-const.c will ensure that the constant is always in the inner
7492 PLUS_EXPR, so the only case we need to do anything about is if
7493 sp, ap, or fp is our second argument, in which case we must swap
7494 the innermost first argument and our second argument. */
7496 if (TREE_CODE (treeop0) == PLUS_EXPR
7497 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7498 && TREE_CODE (treeop1) == VAR_DECL
7499 && (DECL_RTL (treeop1) == frame_pointer_rtx
7500 || DECL_RTL (treeop1) == stack_pointer_rtx
7501 || DECL_RTL (treeop1) == arg_pointer_rtx))
7505 treeop1 = TREE_OPERAND (treeop0, 0);
7506 TREE_OPERAND (treeop0, 0) = t;
7509 /* If the result is to be ptr_mode and we are adding an integer to
7510 something, we might be forming a constant. So try to use
7511 plus_constant. If it produces a sum and we can't accept it,
7512 use force_operand. This allows P = &ARR[const] to generate
7513 efficient code on machines where a SYMBOL_REF is not a valid
7516 If this is an EXPAND_SUM call, always return the sum. */
7517 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7518 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7520 if (modifier == EXPAND_STACK_PARM)
7522 if (TREE_CODE (treeop0) == INTEGER_CST
7523 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7524 && TREE_CONSTANT (treeop1))
7528 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7530 /* Use immed_double_const to ensure that the constant is
7531 truncated according to the mode of OP1, then sign extended
7532 to a HOST_WIDE_INT. Using the constant directly can result
7533 in non-canonical RTL in a 64x32 cross compile. */
7535 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7537 TYPE_MODE (TREE_TYPE (treeop1)));
7538 op1 = plus_constant (op1, INTVAL (constant_part));
7539 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7540 op1 = force_operand (op1, target);
7541 return REDUCE_BIT_FIELD (op1);
7544 else if (TREE_CODE (treeop1) == INTEGER_CST
7545 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7546 && TREE_CONSTANT (treeop0))
7550 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7551 (modifier == EXPAND_INITIALIZER
7552 ? EXPAND_INITIALIZER : EXPAND_SUM));
7553 if (! CONSTANT_P (op0))
7555 op1 = expand_expr (treeop1, NULL_RTX,
7556 VOIDmode, modifier);
7557 /* Return a PLUS if modifier says it's OK. */
7558 if (modifier == EXPAND_SUM
7559 || modifier == EXPAND_INITIALIZER)
7560 return simplify_gen_binary (PLUS, mode, op0, op1);
7563 /* Use immed_double_const to ensure that the constant is
7564 truncated according to the mode of OP1, then sign extended
7565 to a HOST_WIDE_INT. Using the constant directly can result
7566 in non-canonical RTL in a 64x32 cross compile. */
7568 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7570 TYPE_MODE (TREE_TYPE (treeop0)));
7571 op0 = plus_constant (op0, INTVAL (constant_part));
7572 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7573 op0 = force_operand (op0, target);
7574 return REDUCE_BIT_FIELD (op0);
7578 /* Use TER to expand pointer addition of a negated value
7579 as pointer subtraction. */
7580 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
7581 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
7582 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
7583 && TREE_CODE (treeop1) == SSA_NAME
7584 && TYPE_MODE (TREE_TYPE (treeop0))
7585 == TYPE_MODE (TREE_TYPE (treeop1)))
7587 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
7590 treeop1 = gimple_assign_rhs1 (def);
7596 /* No sense saving up arithmetic to be done
7597 if it's all in the wrong mode to form part of an address.
7598 And force_operand won't know whether to sign-extend or
7600 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7601 || mode != ptr_mode)
7603 expand_operands (treeop0, treeop1,
7604 subtarget, &op0, &op1, EXPAND_NORMAL);
7605 if (op0 == const0_rtx)
7607 if (op1 == const0_rtx)
7612 expand_operands (treeop0, treeop1,
7613 subtarget, &op0, &op1, modifier);
7614 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7618 /* For initializers, we are allowed to return a MINUS of two
7619 symbolic constants. Here we handle all cases when both operands
7621 /* Handle difference of two symbolic constants,
7622 for the sake of an initializer. */
7623 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7624 && really_constant_p (treeop0)
7625 && really_constant_p (treeop1))
7627 expand_operands (treeop0, treeop1,
7628 NULL_RTX, &op0, &op1, modifier);
7630 /* If the last operand is a CONST_INT, use plus_constant of
7631 the negated constant. Else make the MINUS. */
7632 if (CONST_INT_P (op1))
7633 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7635 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7638 /* No sense saving up arithmetic to be done
7639 if it's all in the wrong mode to form part of an address.
7640 And force_operand won't know whether to sign-extend or
7642 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7643 || mode != ptr_mode)
7646 expand_operands (treeop0, treeop1,
7647 subtarget, &op0, &op1, modifier);
7649 /* Convert A - const to A + (-const). */
7650 if (CONST_INT_P (op1))
7652 op1 = negate_rtx (mode, op1);
7653 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7658 case WIDEN_MULT_PLUS_EXPR:
7659 case WIDEN_MULT_MINUS_EXPR:
7660 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7661 op2 = expand_normal (ops->op2);
7662 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7666 case WIDEN_MULT_EXPR:
7667 /* If first operand is constant, swap them.
7668 Thus the following special case checks need only
7669 check the second operand. */
7670 if (TREE_CODE (treeop0) == INTEGER_CST)
7677 /* First, check if we have a multiplication of one signed and one
7678 unsigned operand. */
7679 if (TREE_CODE (treeop1) != INTEGER_CST
7680 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7681 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7683 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7684 this_optab = usmul_widen_optab;
7685 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7687 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7689 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7690 expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
7693 expand_operands (treeop0, treeop1, subtarget, &op1, &op0,
7699 /* Check for a multiplication with matching signedness. */
7700 else if ((TREE_CODE (treeop1) == INTEGER_CST
7701 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7702 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7703 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7705 tree op0type = TREE_TYPE (treeop0);
7706 enum machine_mode innermode = TYPE_MODE (op0type);
7707 bool zextend_p = TYPE_UNSIGNED (op0type);
7708 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7709 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7711 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7713 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7715 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7717 temp = expand_widening_mult (mode, op0, op1, target,
7718 unsignedp, this_optab);
7719 return REDUCE_BIT_FIELD (temp);
7721 if (optab_handler (other_optab, mode) != CODE_FOR_nothing
7722 && innermode == word_mode)
7725 op0 = expand_normal (treeop0);
7726 if (TREE_CODE (treeop1) == INTEGER_CST)
7727 op1 = convert_modes (innermode, mode,
7728 expand_normal (treeop1), unsignedp);
7730 op1 = expand_normal (treeop1);
7731 temp = expand_binop (mode, other_optab, op0, op1, target,
7732 unsignedp, OPTAB_LIB_WIDEN);
7733 hipart = gen_highpart (innermode, temp);
7734 htem = expand_mult_highpart_adjust (innermode, hipart,
7738 emit_move_insn (hipart, htem);
7739 return REDUCE_BIT_FIELD (temp);
7743 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7744 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7745 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7746 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7749 /* If this is a fixed-point operation, then we cannot use the code
7750 below because "expand_mult" doesn't support sat/no-sat fixed-point
7752 if (ALL_FIXED_POINT_MODE_P (mode))
7755 /* If first operand is constant, swap them.
7756 Thus the following special case checks need only
7757 check the second operand. */
7758 if (TREE_CODE (treeop0) == INTEGER_CST)
7765 /* Attempt to return something suitable for generating an
7766 indexed address, for machines that support that. */
7768 if (modifier == EXPAND_SUM && mode == ptr_mode
7769 && host_integerp (treeop1, 0))
7771 tree exp1 = treeop1;
7773 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7777 op0 = force_operand (op0, NULL_RTX);
7779 op0 = copy_to_mode_reg (mode, op0);
7781 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7782 gen_int_mode (tree_low_cst (exp1, 0),
7783 TYPE_MODE (TREE_TYPE (exp1)))));
7786 if (modifier == EXPAND_STACK_PARM)
7789 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7790 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7792 case TRUNC_DIV_EXPR:
7793 case FLOOR_DIV_EXPR:
7795 case ROUND_DIV_EXPR:
7796 case EXACT_DIV_EXPR:
7797 /* If this is a fixed-point operation, then we cannot use the code
7798 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7800 if (ALL_FIXED_POINT_MODE_P (mode))
7803 if (modifier == EXPAND_STACK_PARM)
7805 /* Possible optimization: compute the dividend with EXPAND_SUM
7806 then if the divisor is constant can optimize the case
7807 where some terms of the dividend have coeffs divisible by it. */
7808 expand_operands (treeop0, treeop1,
7809 subtarget, &op0, &op1, EXPAND_NORMAL);
7810 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7815 case TRUNC_MOD_EXPR:
7816 case FLOOR_MOD_EXPR:
7818 case ROUND_MOD_EXPR:
7819 if (modifier == EXPAND_STACK_PARM)
7821 expand_operands (treeop0, treeop1,
7822 subtarget, &op0, &op1, EXPAND_NORMAL);
7823 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7825 case FIXED_CONVERT_EXPR:
7826 op0 = expand_normal (treeop0);
7827 if (target == 0 || modifier == EXPAND_STACK_PARM)
7828 target = gen_reg_rtx (mode);
7830 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7831 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7832 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7833 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7835 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7838 case FIX_TRUNC_EXPR:
7839 op0 = expand_normal (treeop0);
7840 if (target == 0 || modifier == EXPAND_STACK_PARM)
7841 target = gen_reg_rtx (mode);
7842 expand_fix (target, op0, unsignedp);
7846 op0 = expand_normal (treeop0);
7847 if (target == 0 || modifier == EXPAND_STACK_PARM)
7848 target = gen_reg_rtx (mode);
7849 /* expand_float can't figure out what to do if FROM has VOIDmode.
7850 So give it the correct mode. With -O, cse will optimize this. */
7851 if (GET_MODE (op0) == VOIDmode)
7852 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7854 expand_float (target, op0,
7855 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7859 op0 = expand_expr (treeop0, subtarget,
7860 VOIDmode, EXPAND_NORMAL);
7861 if (modifier == EXPAND_STACK_PARM)
7863 temp = expand_unop (mode,
7864 optab_for_tree_code (NEGATE_EXPR, type,
7868 return REDUCE_BIT_FIELD (temp);
7871 op0 = expand_expr (treeop0, subtarget,
7872 VOIDmode, EXPAND_NORMAL);
7873 if (modifier == EXPAND_STACK_PARM)
7876 /* ABS_EXPR is not valid for complex arguments. */
7877 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7878 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7880 /* Unsigned abs is simply the operand. Testing here means we don't
7881 risk generating incorrect code below. */
7882 if (TYPE_UNSIGNED (type))
7885 return expand_abs (mode, op0, target, unsignedp,
7886 safe_from_p (target, treeop0, 1));
7890 target = original_target;
7892 || modifier == EXPAND_STACK_PARM
7893 || (MEM_P (target) && MEM_VOLATILE_P (target))
7894 || GET_MODE (target) != mode
7896 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7897 target = gen_reg_rtx (mode);
7898 expand_operands (treeop0, treeop1,
7899 target, &op0, &op1, EXPAND_NORMAL);
7901 /* First try to do it with a special MIN or MAX instruction.
7902 If that does not win, use a conditional jump to select the proper
7904 this_optab = optab_for_tree_code (code, type, optab_default);
7905 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7910 /* At this point, a MEM target is no longer useful; we will get better
7913 if (! REG_P (target))
7914 target = gen_reg_rtx (mode);
7916 /* If op1 was placed in target, swap op0 and op1. */
7917 if (target != op0 && target == op1)
7924 /* We generate better code and avoid problems with op1 mentioning
7925 target by forcing op1 into a pseudo if it isn't a constant. */
7926 if (! CONSTANT_P (op1))
7927 op1 = force_reg (mode, op1);
7930 enum rtx_code comparison_code;
7933 if (code == MAX_EXPR)
7934 comparison_code = unsignedp ? GEU : GE;
7936 comparison_code = unsignedp ? LEU : LE;
7938 /* Canonicalize to comparisons against 0. */
7939 if (op1 == const1_rtx)
7941 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7942 or (a != 0 ? a : 1) for unsigned.
7943 For MIN we are safe converting (a <= 1 ? a : 1)
7944 into (a <= 0 ? a : 1) */
7945 cmpop1 = const0_rtx;
7946 if (code == MAX_EXPR)
7947 comparison_code = unsignedp ? NE : GT;
7949 if (op1 == constm1_rtx && !unsignedp)
7951 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7952 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7953 cmpop1 = const0_rtx;
7954 if (code == MIN_EXPR)
7955 comparison_code = LT;
7957 #ifdef HAVE_conditional_move
7958 /* Use a conditional move if possible. */
7959 if (can_conditionally_move_p (mode))
7963 /* ??? Same problem as in expmed.c: emit_conditional_move
7964 forces a stack adjustment via compare_from_rtx, and we
7965 lose the stack adjustment if the sequence we are about
7966 to create is discarded. */
7967 do_pending_stack_adjust ();
7971 /* Try to emit the conditional move. */
7972 insn = emit_conditional_move (target, comparison_code,
7977 /* If we could do the conditional move, emit the sequence,
7981 rtx seq = get_insns ();
7987 /* Otherwise discard the sequence and fall back to code with
7993 emit_move_insn (target, op0);
7995 temp = gen_label_rtx ();
7996 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
7997 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8000 emit_move_insn (target, op1);
8005 op0 = expand_expr (treeop0, subtarget,
8006 VOIDmode, EXPAND_NORMAL);
8007 if (modifier == EXPAND_STACK_PARM)
8009 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8013 /* ??? Can optimize bitwise operations with one arg constant.
8014 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8015 and (a bitwise1 b) bitwise2 b (etc)
8016 but that is probably not worth while. */
8018 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8019 boolean values when we want in all cases to compute both of them. In
8020 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8021 as actual zero-or-1 values and then bitwise anding. In cases where
8022 there cannot be any side effects, better code would be made by
8023 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8024 how to recognize those cases. */
8026 case TRUTH_AND_EXPR:
8027 code = BIT_AND_EXPR;
8032 code = BIT_IOR_EXPR;
8036 case TRUTH_XOR_EXPR:
8037 code = BIT_XOR_EXPR;
8043 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8044 || (GET_MODE_PRECISION (TYPE_MODE (type))
8045 == TYPE_PRECISION (type)));
8050 /* If this is a fixed-point operation, then we cannot use the code
8051 below because "expand_shift" doesn't support sat/no-sat fixed-point
8053 if (ALL_FIXED_POINT_MODE_P (mode))
8056 if (! safe_from_p (subtarget, treeop1, 1))
8058 if (modifier == EXPAND_STACK_PARM)
8060 op0 = expand_expr (treeop0, subtarget,
8061 VOIDmode, EXPAND_NORMAL);
8062 temp = expand_shift (code, mode, op0, treeop1, target,
8064 if (code == LSHIFT_EXPR)
8065 temp = REDUCE_BIT_FIELD (temp);
8068 /* Could determine the answer when only additive constants differ. Also,
8069 the addition of one can be handled by changing the condition. */
8076 case UNORDERED_EXPR:
8084 temp = do_store_flag (ops,
8085 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8086 tmode != VOIDmode ? tmode : mode);
8090 /* Use a compare and a jump for BLKmode comparisons, or for function
8091 type comparisons is HAVE_canonicalize_funcptr_for_compare. */
8094 || modifier == EXPAND_STACK_PARM
8095 || ! safe_from_p (target, treeop0, 1)
8096 || ! safe_from_p (target, treeop1, 1)
8097 /* Make sure we don't have a hard reg (such as function's return
8098 value) live across basic blocks, if not optimizing. */
8099 || (!optimize && REG_P (target)
8100 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8101 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8103 emit_move_insn (target, const0_rtx);
8105 op1 = gen_label_rtx ();
8106 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8108 emit_move_insn (target, const1_rtx);
8113 case TRUTH_NOT_EXPR:
8114 if (modifier == EXPAND_STACK_PARM)
8116 op0 = expand_expr (treeop0, target,
8117 VOIDmode, EXPAND_NORMAL);
8118 /* The parser is careful to generate TRUTH_NOT_EXPR
8119 only with operands that are always zero or one. */
8120 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8121 target, 1, OPTAB_LIB_WIDEN);
8126 /* Get the rtx code of the operands. */
8127 op0 = expand_normal (treeop0);
8128 op1 = expand_normal (treeop1);
8131 target = gen_reg_rtx (TYPE_MODE (type));
8133 /* Move the real (op0) and imaginary (op1) parts to their location. */
8134 write_complex_part (target, op0, false);
8135 write_complex_part (target, op1, true);
8139 case WIDEN_SUM_EXPR:
8141 tree oprnd0 = treeop0;
8142 tree oprnd1 = treeop1;
8144 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8145 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8150 case REDUC_MAX_EXPR:
8151 case REDUC_MIN_EXPR:
8152 case REDUC_PLUS_EXPR:
8154 op0 = expand_normal (treeop0);
8155 this_optab = optab_for_tree_code (code, type, optab_default);
8156 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8161 case VEC_EXTRACT_EVEN_EXPR:
8162 case VEC_EXTRACT_ODD_EXPR:
8164 expand_operands (treeop0, treeop1,
8165 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8166 this_optab = optab_for_tree_code (code, type, optab_default);
8167 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8173 case VEC_INTERLEAVE_HIGH_EXPR:
8174 case VEC_INTERLEAVE_LOW_EXPR:
8176 expand_operands (treeop0, treeop1,
8177 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8178 this_optab = optab_for_tree_code (code, type, optab_default);
8179 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8185 case VEC_LSHIFT_EXPR:
8186 case VEC_RSHIFT_EXPR:
8188 target = expand_vec_shift_expr (ops, target);
8192 case VEC_UNPACK_HI_EXPR:
8193 case VEC_UNPACK_LO_EXPR:
8195 op0 = expand_normal (treeop0);
8196 this_optab = optab_for_tree_code (code, type, optab_default);
8197 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8203 case VEC_UNPACK_FLOAT_HI_EXPR:
8204 case VEC_UNPACK_FLOAT_LO_EXPR:
8206 op0 = expand_normal (treeop0);
8207 /* The signedness is determined from input operand. */
8208 this_optab = optab_for_tree_code (code,
8209 TREE_TYPE (treeop0),
8211 temp = expand_widen_pattern_expr
8212 (ops, op0, NULL_RTX, NULL_RTX,
8213 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8219 case VEC_WIDEN_MULT_HI_EXPR:
8220 case VEC_WIDEN_MULT_LO_EXPR:
8222 tree oprnd0 = treeop0;
8223 tree oprnd1 = treeop1;
8225 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8226 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8228 gcc_assert (target);
8232 case VEC_PACK_TRUNC_EXPR:
8233 case VEC_PACK_SAT_EXPR:
8234 case VEC_PACK_FIX_TRUNC_EXPR:
8235 mode = TYPE_MODE (TREE_TYPE (treeop0));
8242 /* Here to do an ordinary binary operator. */
8244 expand_operands (treeop0, treeop1,
8245 subtarget, &op0, &op1, EXPAND_NORMAL);
8247 this_optab = optab_for_tree_code (code, type, optab_default);
8249 if (modifier == EXPAND_STACK_PARM)
8251 temp = expand_binop (mode, this_optab, op0, op1, target,
8252 unsignedp, OPTAB_LIB_WIDEN);
8254 return REDUCE_BIT_FIELD (temp);
8256 #undef REDUCE_BIT_FIELD
8259 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8260 enum expand_modifier modifier, rtx *alt_rtl)
8262 rtx op0, op1, temp, decl_rtl;
8265 enum machine_mode mode;
8266 enum tree_code code = TREE_CODE (exp);
8268 rtx subtarget, original_target;
8271 bool reduce_bit_field;
8272 location_t loc = EXPR_LOCATION (exp);
8273 struct separate_ops ops;
8274 tree treeop0, treeop1, treeop2;
8275 tree ssa_name = NULL_TREE;
8278 type = TREE_TYPE (exp);
8279 mode = TYPE_MODE (type);
8280 unsignedp = TYPE_UNSIGNED (type);
8282 treeop0 = treeop1 = treeop2 = NULL_TREE;
8283 if (!VL_EXP_CLASS_P (exp))
8284 switch (TREE_CODE_LENGTH (code))
8287 case 3: treeop2 = TREE_OPERAND (exp, 2);
8288 case 2: treeop1 = TREE_OPERAND (exp, 1);
8289 case 1: treeop0 = TREE_OPERAND (exp, 0);
8299 ignore = (target == const0_rtx
8300 || ((CONVERT_EXPR_CODE_P (code)
8301 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8302 && TREE_CODE (type) == VOID_TYPE));
8304 /* An operation in what may be a bit-field type needs the
8305 result to be reduced to the precision of the bit-field type,
8306 which is narrower than that of the type's mode. */
8307 reduce_bit_field = (!ignore
8308 && TREE_CODE (type) == INTEGER_TYPE
8309 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
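/* An illustrative example (not from the original sources): given a C
   bitfield "struct { signed int f : 3; } s;", arithmetic on s.f is
   performed in SImode, so a result such as 5 + 6 == 11 must be
   reduced to 3 bits -- yielding 3 under the signed interpretation --
   before it can be stored back or compared. */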
8311 /* If we are going to ignore this result, we need only do something
8312 if there is a side-effect somewhere in the expression. If there
8313 is, short-circuit the most common cases here. Note that we must
8314 not call expand_expr with anything but const0_rtx in case this
8315 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8319 if (! TREE_SIDE_EFFECTS (exp))
8322 /* Ensure we reference a volatile object even if the value is ignored, but
8323 don't do this if all we are doing is taking its address. */
8324 if (TREE_THIS_VOLATILE (exp)
8325 && TREE_CODE (exp) != FUNCTION_DECL
8326 && mode != VOIDmode && mode != BLKmode
8327 && modifier != EXPAND_CONST_ADDRESS)
8329 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8331 temp = copy_to_reg (temp);
8335 if (TREE_CODE_CLASS (code) == tcc_unary
8336 || code == COMPONENT_REF || code == INDIRECT_REF)
8337 return expand_expr (treeop0, const0_rtx, VOIDmode,
8340 else if (TREE_CODE_CLASS (code) == tcc_binary
8341 || TREE_CODE_CLASS (code) == tcc_comparison
8342 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8344 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8345 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8348 else if (code == BIT_FIELD_REF)
8350 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8351 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8352 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
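/* For instance, an ignored "a[i++] + f ()" reaches the binary-operator
   arm above: both operands are expanded with const0_rtx targets so
   that i++ and the call still take effect, while the addition itself
   is never emitted. A descriptive sketch of the behavior. */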
8359 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8362 /* Use subtarget as the target for operand 0 of a binary operation. */
8363 subtarget = get_subtarget (target);
8364 original_target = target;
8370 tree function = decl_function_context (exp);
8372 temp = label_rtx (exp);
8373 temp = gen_rtx_LABEL_REF (Pmode, temp);
8375 if (function != current_function_decl
8377 LABEL_REF_NONLOCAL_P (temp) = 1;
8379 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8384 /* ??? ivopts calls into the expander without any preparation from
8385 out-of-ssa. So fake instructions as if this were an access to the
8386 base variable. This unnecessarily allocates a pseudo; see whether we
8387 can reuse it if partition base vars have it set already. */
8388 if (!currently_expanding_to_rtl)
8389 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8392 g = get_gimple_for_ssa_name (exp);
8394 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8398 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8399 exp = SSA_NAME_VAR (ssa_name);
8400 goto expand_decl_rtl;
8404 /* If a static var's type was incomplete when the decl was written,
8405 but the type is complete now, lay out the decl now. */
8406 if (DECL_SIZE (exp) == 0
8407 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8408 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8409 layout_decl (exp, 0);
8411 /* ... fall through ... */
8415 decl_rtl = DECL_RTL (exp);
8417 gcc_assert (decl_rtl);
8418 decl_rtl = copy_rtx (decl_rtl);
8419 /* Record writes to register variables. */
8420 if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
8421 && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
8423 int i = REGNO (decl_rtl);
8424 int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
8427 SET_HARD_REG_BIT (crtl->asm_clobbers, i);
8433 /* Ensure the variable is marked as used even if it doesn't go through
8434 a parser. If it hasn't been used yet, write out an external
8436 if (! TREE_USED (exp))
8438 assemble_external (exp);
8439 TREE_USED (exp) = 1;
8442 /* Show we haven't gotten RTL for this yet. */
8445 /* Variables inherited from containing functions should have
8446 been lowered by this point. */
8447 context = decl_function_context (exp);
8448 gcc_assert (!context
8449 || context == current_function_decl
8450 || TREE_STATIC (exp)
8451 || DECL_EXTERNAL (exp)
8452 /* ??? C++ creates functions that are not TREE_STATIC. */
8453 || TREE_CODE (exp) == FUNCTION_DECL);
8455 /* This is the case of an array whose size is to be determined
8456 from its initializer, while the initializer is still being parsed.
8459 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8460 temp = validize_mem (decl_rtl);
8462 /* If DECL_RTL is memory, we are in the normal case and the
8463 address is not valid, get the address into a register. */
8465 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8468 *alt_rtl = decl_rtl;
8469 decl_rtl = use_anchored_address (decl_rtl);
8470 if (modifier != EXPAND_CONST_ADDRESS
8471 && modifier != EXPAND_SUM
8472 && !memory_address_addr_space_p (DECL_MODE (exp),
8474 MEM_ADDR_SPACE (decl_rtl)))
8475 temp = replace_equiv_address (decl_rtl,
8476 copy_rtx (XEXP (decl_rtl, 0)));
8479 /* If we got something, return it. But first, set the alignment
8480 if the address is a register. */
8483 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8484 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8489 /* If the mode of DECL_RTL does not match that of the decl, it
8490 must be a promoted value. We return a SUBREG of the wanted mode,
8491 but mark it so that we know that it was already extended. */
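/* For instance, on a target whose PROMOTE_MODE widens HImode locals
   to SImode, DECL_RTL is an SImode register; the lowpart SUBREG built
   below lets readers use the HImode view while SUBREG_PROMOTED_VAR_P
   records that the value is already extended. An illustrative note. */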
8492 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8494 enum machine_mode pmode;
8496 /* Get the signedness to be used for this variable. Ensure we get
8497 the same mode we got when the variable was declared. */
8498 if (code == SSA_NAME
8499 && (g = SSA_NAME_DEF_STMT (ssa_name))
8500 && gimple_code (g) == GIMPLE_CALL)
8501 pmode = promote_function_mode (type, mode, &unsignedp,
8503 (TREE_TYPE (gimple_call_fn (g))),
8506 pmode = promote_decl_mode (exp, &unsignedp);
8507 gcc_assert (GET_MODE (decl_rtl) == pmode);
8509 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8510 SUBREG_PROMOTED_VAR_P (temp) = 1;
8511 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8518 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8519 TREE_INT_CST_HIGH (exp), mode);
8525 tree tmp = NULL_TREE;
8526 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8527 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8528 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8529 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8530 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8531 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8532 return const_vector_from_tree (exp);
8533 if (GET_MODE_CLASS (mode) == MODE_INT)
8535 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8537 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8540 tmp = build_constructor_from_list (type,
8541 TREE_VECTOR_CST_ELTS (exp));
8542 return expand_expr (tmp, ignore ? const0_rtx : target,
8547 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8550 /* If optimized, generate immediate CONST_DOUBLE
8551 which will be turned into memory by reload if necessary.
8553 We used to force a register so that loop.c could see it. But
8554 this does not allow gen_* patterns to perform optimizations with
8555 the constants. It also produces two insns in cases like "x = 1.0;".
8556 On most machines, floating-point constants are not permitted in
8557 many insns, so we'd end up copying it to a register in any case.
8559 Now, we do the copying in expand_binop, if appropriate. */
8560 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8561 TYPE_MODE (TREE_TYPE (exp)));
8564 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8565 TYPE_MODE (TREE_TYPE (exp)));
8568 /* Handle evaluating a complex constant in a CONCAT target. */
8569 if (original_target && GET_CODE (original_target) == CONCAT)
8571 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8574 rtarg = XEXP (original_target, 0);
8575 itarg = XEXP (original_target, 1);
8577 /* Move the real and imaginary parts separately. */
8578 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8579 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8582 emit_move_insn (rtarg, op0);
8584 emit_move_insn (itarg, op1);
8586 return original_target;
8589 /* ... fall through ... */
8592 temp = expand_expr_constant (exp, 1, modifier);
8594 /* temp contains a constant address.
8595 On RISC machines where a constant address isn't valid,
8596 make some insns to get that address into a register. */
8597 if (modifier != EXPAND_CONST_ADDRESS
8598 && modifier != EXPAND_INITIALIZER
8599 && modifier != EXPAND_SUM
8600 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8601 MEM_ADDR_SPACE (temp)))
8602 return replace_equiv_address (temp,
8603 copy_rtx (XEXP (temp, 0)));
8609 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8611 if (!SAVE_EXPR_RESOLVED_P (exp))
8613 /* We can indeed still hit this case, typically via builtin
8614 expanders calling save_expr immediately before expanding
8615 something. Assume this means that we only have to deal
8616 with non-BLKmode values. */
8617 gcc_assert (GET_MODE (ret) != BLKmode);
8619 val = build_decl (EXPR_LOCATION (exp),
8620 VAR_DECL, NULL, TREE_TYPE (exp));
8621 DECL_ARTIFICIAL (val) = 1;
8622 DECL_IGNORED_P (val) = 1;
8624 TREE_OPERAND (exp, 0) = treeop0;
8625 SAVE_EXPR_RESOLVED_P (exp) = 1;
8627 if (!CONSTANT_P (ret))
8628 ret = copy_to_reg (ret);
8629 SET_DECL_RTL (val, ret);
8637 /* If we don't need the result, just ensure we evaluate any
8641 unsigned HOST_WIDE_INT idx;
8644 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8645 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8650 return expand_constructor (exp, target, modifier, false);
8652 case TARGET_MEM_REF:
8654 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8655 struct mem_address addr;
8658 get_address_description (exp, &addr);
8659 op0 = addr_for_mem_ref (&addr, as, true);
8660 op0 = memory_address_addr_space (mode, op0, as);
8661 temp = gen_rtx_MEM (mode, op0);
8662 set_mem_attributes (temp, exp, 0);
8663 set_mem_addr_space (temp, as);
8664 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8665 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8667 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8668 /* If the target does not have special handling for unaligned
8669 loads of mode then it can use regular moves for them. */
8670 && ((icode = optab_handler (movmisalign_optab, mode))
8671 != CODE_FOR_nothing))
8675 /* We've already validated the memory, and we're creating a
8676 new pseudo destination. The predicates really can't fail. */
8677 reg = gen_reg_rtx (mode);
8679 /* Nor can the insn generator. */
8680 insn = GEN_FCN (icode) (reg, temp);
8681 gcc_assert (insn != NULL_RTX);
8692 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8693 enum machine_mode address_mode;
8694 tree base = TREE_OPERAND (exp, 0);
8697 /* Handle expansion of non-aliased memory with non-BLKmode. That
8698 might end up in a register. */
8699 if (TREE_CODE (base) == ADDR_EXPR)
8701 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8703 base = TREE_OPERAND (base, 0);
8707 base = get_addr_base_and_unit_offset (base, &off);
8711 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8712 decl we must use bitfield operations. */
8714 && !TREE_ADDRESSABLE (base)
8715 && DECL_MODE (base) != BLKmode
8716 && DECL_RTL_SET_P (base)
8717 && !MEM_P (DECL_RTL (base)))
8721 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8722 && (GET_MODE_BITSIZE (DECL_MODE (base))
8723 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8724 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8725 TREE_TYPE (exp), base),
8726 target, tmode, modifier);
8727 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8728 bftype = TREE_TYPE (base);
8729 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8730 bftype = TREE_TYPE (exp);
8731 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8733 TYPE_SIZE (TREE_TYPE (exp)),
8735 target, tmode, modifier);
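/* E.g. a 4-byte MEM_REF into an 8-byte non-addressable register
   variable is rewritten above as a BIT_FIELD_REF of the decl, so the
   bits are extracted directly instead of spilling the decl to memory.
   A descriptive sketch, not original commentary. */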
8738 address_mode = targetm.addr_space.address_mode (as);
8739 base = TREE_OPERAND (exp, 0);
8740 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
8742 tree mask = gimple_assign_rhs2 (def_stmt);
8743 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
8744 gimple_assign_rhs1 (def_stmt), mask);
8745 TREE_OPERAND (exp, 0) = base;
8747 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8748 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8749 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8750 op0 = convert_memory_address_addr_space (address_mode, op0, as);
8751 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8754 = immed_double_int_const (mem_ref_offset (exp), address_mode);
8755 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
8757 op0 = memory_address_addr_space (mode, op0, as);
8758 temp = gen_rtx_MEM (mode, op0);
8759 set_mem_attributes (temp, exp, 0);
8760 set_mem_addr_space (temp, as);
8761 if (TREE_THIS_VOLATILE (exp))
8762 MEM_VOLATILE_P (temp) = 1;
8764 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8765 /* If the target does not have special handling for unaligned
8766 loads of mode then it can use regular moves for them. */
8767 && ((icode = optab_handler (movmisalign_optab, mode))
8768 != CODE_FOR_nothing))
8772 /* We've already validated the memory, and we're creating a
8773 new pseudo destination. The predicates really can't fail. */
8774 reg = gen_reg_rtx (mode);
8776 /* Nor can the insn generator. */
8777 insn = GEN_FCN (icode) (reg, temp);
8788 tree array = treeop0;
8789 tree index = treeop1;
8791 /* Fold an expression like: "foo"[2].
8792 This is not done in fold so it won't happen inside &.
8793 Don't fold if this is for wide characters since it's too
8794 difficult to do correctly and this is a very rare case. */
8796 if (modifier != EXPAND_CONST_ADDRESS
8797 && modifier != EXPAND_INITIALIZER
8798 && modifier != EXPAND_MEMORY)
8800 tree t = fold_read_from_constant_string (exp);
8803 return expand_expr (t, target, tmode, modifier);
8806 /* If this is a constant index into a constant array,
8807 just get the value from the array. Handle both cases: when
8808 we have an explicit constructor and when our operand is a variable
8809 that was declared const. */
8811 if (modifier != EXPAND_CONST_ADDRESS
8812 && modifier != EXPAND_INITIALIZER
8813 && modifier != EXPAND_MEMORY
8814 && TREE_CODE (array) == CONSTRUCTOR
8815 && ! TREE_SIDE_EFFECTS (array)
8816 && TREE_CODE (index) == INTEGER_CST)
8818 unsigned HOST_WIDE_INT ix;
8821 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8823 if (tree_int_cst_equal (field, index))
8825 if (!TREE_SIDE_EFFECTS (value))
8826 return expand_expr (fold (value), target, tmode, modifier);
8831 else if (optimize >= 1
8832 && modifier != EXPAND_CONST_ADDRESS
8833 && modifier != EXPAND_INITIALIZER
8834 && modifier != EXPAND_MEMORY
8835 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8836 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8837 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8838 && const_value_known_p (array))
8840 if (TREE_CODE (index) == INTEGER_CST)
8842 tree init = DECL_INITIAL (array);
8844 if (TREE_CODE (init) == CONSTRUCTOR)
8846 unsigned HOST_WIDE_INT ix;
8849 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8851 if (tree_int_cst_equal (field, index))
8853 if (TREE_SIDE_EFFECTS (value))
8856 if (TREE_CODE (value) == CONSTRUCTOR)
8858 /* If VALUE is a CONSTRUCTOR, this
8859 optimization is only useful if
8860 this doesn't store the CONSTRUCTOR
8861 into memory. If it does, it is more
8862 efficient to just load the data from
8863 the array directly. */
8864 rtx ret = expand_constructor (value, target,
8866 if (ret == NULL_RTX)
8870 return expand_expr (fold (value), target, tmode,
8874 else if (TREE_CODE (init) == STRING_CST)
8876 tree index1 = index;
8877 tree low_bound = array_ref_low_bound (exp);
8878 index1 = fold_convert_loc (loc, sizetype,
8881 /* Optimize the special case of a zero lower bound.
8883 We convert the low_bound to sizetype to avoid some problems
8884 with constant folding. (E.g. suppose the lower bound is 1,
8885 and its mode is QI. Without the conversion, (ARRAY
8886 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8887 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8889 if (! integer_zerop (low_bound))
8890 index1 = size_diffop_loc (loc, index1,
8891 fold_convert_loc (loc, sizetype,
8894 if (compare_tree_int (index1,
8895 TREE_STRING_LENGTH (init)) < 0)
8897 tree type = TREE_TYPE (TREE_TYPE (init));
8898 enum machine_mode mode = TYPE_MODE (type);
8900 if (GET_MODE_CLASS (mode) == MODE_INT
8901 && GET_MODE_SIZE (mode) == 1)
8902 return gen_int_mode (TREE_STRING_POINTER (init)
8903 [TREE_INT_CST_LOW (index1)],
8910 goto normal_inner_ref;
8913 /* If the operand is a CONSTRUCTOR, we can just extract the
8914 appropriate field if it is present. */
8915 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8917 unsigned HOST_WIDE_INT idx;
8920 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8922 if (field == treeop1
8923 /* We can normally use the value of the field in the
8924 CONSTRUCTOR. However, if this is a bitfield in
8925 an integral mode that we can fit in a HOST_WIDE_INT,
8926 we must mask only the number of bits in the bitfield,
8927 since this is done implicitly by the constructor. If
8928 the bitfield does not meet either of those conditions,
8929 we can't do this optimization. */
8930 && (! DECL_BIT_FIELD (field)
8931 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8932 && (GET_MODE_BITSIZE (DECL_MODE (field))
8933 <= HOST_BITS_PER_WIDE_INT))))
8935 if (DECL_BIT_FIELD (field)
8936 && modifier == EXPAND_STACK_PARM)
8938 op0 = expand_expr (value, target, tmode, modifier);
8939 if (DECL_BIT_FIELD (field))
8941 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8942 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8944 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8946 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8947 op0 = expand_and (imode, op0, op1, target);
8952 = build_int_cst (NULL_TREE,
8953 GET_MODE_BITSIZE (imode) - bitsize);
8955 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8957 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8965 goto normal_inner_ref;
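/* Illustrative note on the constructor case above: for
   "struct { signed int f : 4; } x = { v };", reading x.f out of the
   CONSTRUCTOR yields v in its full mode; the unsigned path masks
   with (1 << 4) - 1, while the signed path shifts left and then
   right by GET_MODE_BITSIZE (imode) - 4 to sign-extend. */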
8968 case ARRAY_RANGE_REF:
8971 enum machine_mode mode1, mode2;
8972 HOST_WIDE_INT bitsize, bitpos;
8974 int volatilep = 0, must_force_mem;
8975 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8976 &mode1, &unsignedp, &volatilep, true);
8977 rtx orig_op0, memloc;
8979 /* If we got back the original object, something is wrong. Perhaps
8980 we are evaluating an expression too early. In any event, don't
8981 infinitely recurse. */
8982 gcc_assert (tem != exp);
8984 /* If TEM's type is a union of variable size, pass TARGET to the inner
8985 computation, since it will need a temporary and TARGET is known
8986 to suffice. This occurs in unchecked conversion in Ada. */
8989 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8990 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8992 && modifier != EXPAND_STACK_PARM
8993 ? target : NULL_RTX),
8995 (modifier == EXPAND_INITIALIZER
8996 || modifier == EXPAND_CONST_ADDRESS
8997 || modifier == EXPAND_STACK_PARM)
8998 ? modifier : EXPAND_NORMAL);
9001 /* If the bitfield is volatile, we want to access it in the
9002 field's mode, not the computed mode. */
9004 && GET_CODE (op0) == MEM
9005 && flag_strict_volatile_bitfields > 0)
9006 op0 = adjust_address (op0, mode1, 0);
9009 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9011 /* If we have either an offset, a BLKmode result, or a reference
9012 outside the underlying object, we must force it to memory.
9013 Such a case can occur in Ada if we have unchecked conversion
9014 of an expression from a scalar type to an aggregate type or
9015 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9016 passed a partially uninitialized object or a view-conversion
9017 to a larger size. */
9018 must_force_mem = (offset
9020 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9022 /* Handle CONCAT first. */
9023 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9026 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9029 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9032 op0 = XEXP (op0, 0);
9033 mode2 = GET_MODE (op0);
9035 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9036 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9040 op0 = XEXP (op0, 1);
9042 mode2 = GET_MODE (op0);
9045 /* Otherwise force into memory. */
9049 /* If this is a constant, put it in a register if it is a legitimate
9050 constant and we don't need a memory reference. */
9051 if (CONSTANT_P (op0)
9053 && LEGITIMATE_CONSTANT_P (op0)
9055 op0 = force_reg (mode2, op0);
9057 /* Otherwise, if this is a constant, try to force it to the constant
9058 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9059 is a legitimate constant. */
9060 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9061 op0 = validize_mem (memloc);
9063 /* Otherwise, if this is a constant or the object is not in memory
9064 and needs to be, put it there. */
9065 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9067 tree nt = build_qualified_type (TREE_TYPE (tem),
9068 (TYPE_QUALS (TREE_TYPE (tem))
9069 | TYPE_QUAL_CONST));
9070 memloc = assign_temp (nt, 1, 1, 1);
9071 emit_move_insn (memloc, op0);
9077 enum machine_mode address_mode;
9078 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9081 gcc_assert (MEM_P (op0));
9084 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9085 if (GET_MODE (offset_rtx) != address_mode)
9086 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9088 if (GET_MODE (op0) == BLKmode
9089 /* A constant address in OP0 can have VOIDmode, we must
9090 not try to call force_reg in that case. */
9091 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9093 && (bitpos % bitsize) == 0
9094 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9095 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9097 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9101 op0 = offset_address (op0, offset_rtx,
9102 highest_pow2_factor (offset));
9105 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9106 record its alignment as BIGGEST_ALIGNMENT. */
9107 if (MEM_P (op0) && bitpos == 0 && offset != 0
9108 && is_aligning_offset (offset, tem))
9109 set_mem_align (op0, BIGGEST_ALIGNMENT);
9111 /* Don't forget about volatility even if this is a bitfield. */
9112 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9114 if (op0 == orig_op0)
9115 op0 = copy_rtx (op0);
9117 MEM_VOLATILE_P (op0) = 1;
9120 /* In cases where an aligned union has an unaligned object
9121 as a field, we might be extracting a BLKmode value from
9122 an integer-mode (e.g., SImode) object. Handle this case
9123 by doing the extract into an object as wide as the field
9124 (which we know to be the width of a basic mode), then
9125 storing into memory, and changing the mode to BLKmode. */
9126 if (mode1 == VOIDmode
9127 || REG_P (op0) || GET_CODE (op0) == SUBREG
9128 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9129 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9130 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9131 && modifier != EXPAND_CONST_ADDRESS
9132 && modifier != EXPAND_INITIALIZER)
9133 /* If the field is volatile, we always want an aligned
9135 || (volatilep && flag_strict_volatile_bitfields > 0)
9136 /* If the field isn't aligned enough to fetch as a memref,
9137 fetch it as a bit field. */
9138 || (mode1 != BLKmode
9139 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9140 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9142 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9143 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9144 && ((modifier == EXPAND_CONST_ADDRESS
9145 || modifier == EXPAND_INITIALIZER)
9147 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9148 || (bitpos % BITS_PER_UNIT != 0)))
9149 /* If the type and the field are a constant size and the
9150 size of the type isn't the same size as the bitfield,
9151 we must use bitfield operations. */
9153 && TYPE_SIZE (TREE_TYPE (exp))
9154 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9155 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9158 enum machine_mode ext_mode = mode;
9160 if (ext_mode == BLKmode
9161 && ! (target != 0 && MEM_P (op0)
9163 && bitpos % BITS_PER_UNIT == 0))
9164 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9166 if (ext_mode == BLKmode)
9169 target = assign_temp (type, 0, 1, 1);
9174 /* In this case, BITPOS must start at a byte boundary and
9175 TARGET, if specified, must be a MEM. */
9176 gcc_assert (MEM_P (op0)
9177 && (!target || MEM_P (target))
9178 && !(bitpos % BITS_PER_UNIT));
9180 emit_block_move (target,
9181 adjust_address (op0, VOIDmode,
9182 bitpos / BITS_PER_UNIT),
9183 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9185 (modifier == EXPAND_STACK_PARM
9186 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9191 op0 = validize_mem (op0);
9193 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9194 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9196 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9197 (modifier == EXPAND_STACK_PARM
9198 ? NULL_RTX : target),
9199 ext_mode, ext_mode);
9201 /* If the result is a record type and BITSIZE is narrower than
9202 the mode of OP0, an integral mode, and this is a big endian
9203 machine, we must put the field into the high-order bits. */
9204 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9205 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9206 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9207 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9208 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9212 /* If the result type is BLKmode, store the data into a temporary
9213 of the appropriate type, but with the mode corresponding to the
9214 mode for the data we have (op0's mode). It's tempting to make
9215 this a constant type, since we know it's only being stored once,
9216 but that can cause problems if we are taking the address of this
9217 COMPONENT_REF because the MEM of any reference via that address
9218 will have flags corresponding to the type, which will not
9219 necessarily be constant. */
9220 if (mode == BLKmode)
9222 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9225 /* If the reference doesn't use the alias set of its type,
9226 we cannot create the temporary using that type. */
9227 if (component_uses_parent_alias_set (exp))
9229 new_rtx = assign_stack_local (ext_mode, size, 0);
9230 set_mem_alias_set (new_rtx, get_alias_set (exp));
9233 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9235 emit_move_insn (new_rtx, op0);
9236 op0 = copy_rtx (new_rtx);
9237 PUT_MODE (op0, BLKmode);
9238 set_mem_attributes (op0, exp, 1);
9244 /* If the result is BLKmode, use that to access the object
9246 if (mode == BLKmode)
9249 /* Get a reference to just this component. */
9250 if (modifier == EXPAND_CONST_ADDRESS
9251 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9252 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9254 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9256 if (op0 == orig_op0)
9257 op0 = copy_rtx (op0);
9259 set_mem_attributes (op0, exp, 0);
9260 if (REG_P (XEXP (op0, 0)))
9261 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9263 MEM_VOLATILE_P (op0) |= volatilep;
9264 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9265 || modifier == EXPAND_CONST_ADDRESS
9266 || modifier == EXPAND_INITIALIZER)
9268 else if (target == 0)
9269 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9271 convert_move (target, op0, unsignedp);
9276 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9279 /* All valid uses of __builtin_va_arg_pack () are removed during
9281 if (CALL_EXPR_VA_ARG_PACK (exp))
9282 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9284 tree fndecl = get_callee_fndecl (exp), attr;
9287 && (attr = lookup_attribute ("error",
9288 DECL_ATTRIBUTES (fndecl))) != NULL)
9289 error ("%Kcall to %qs declared with attribute error: %s",
9290 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9291 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9293 && (attr = lookup_attribute ("warning",
9294 DECL_ATTRIBUTES (fndecl))) != NULL)
9295 warning_at (tree_nonartificial_location (exp),
9296 0, "%Kcall to %qs declared with attribute warning: %s",
9297 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9298 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
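/* E.g. a call to a function declared with
   __attribute__((warning ("deprecated entry point"))) that survives
   to expansion triggers the diagnostic above; the call itself is
   still expanded normally. A hypothetical illustration. */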
9300 /* Check for a built-in function. */
9301 if (fndecl && DECL_BUILT_IN (fndecl))
9303 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9304 return expand_builtin (exp, target, subtarget, tmode, ignore);
9307 return expand_call (exp, target, ignore);
9309 case VIEW_CONVERT_EXPR:
9312 /* If we are converting to BLKmode, try to avoid an intermediate
9313 temporary by fetching an inner memory reference. */
9315 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9316 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9317 && handled_component_p (treeop0))
9319 enum machine_mode mode1;
9320 HOST_WIDE_INT bitsize, bitpos;
9325 = get_inner_reference (treeop0, &bitsize, &bitpos,
9326 &offset, &mode1, &unsignedp, &volatilep,
9330 /* ??? We should work harder and deal with non-zero offsets. */
9332 && (bitpos % BITS_PER_UNIT) == 0
9334 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9336 /* See the normal_inner_ref case for the rationale. */
9339 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9340 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9342 && modifier != EXPAND_STACK_PARM
9343 ? target : NULL_RTX),
9345 (modifier == EXPAND_INITIALIZER
9346 || modifier == EXPAND_CONST_ADDRESS
9347 || modifier == EXPAND_STACK_PARM)
9348 ? modifier : EXPAND_NORMAL);
9350 if (MEM_P (orig_op0))
9354 /* Get a reference to just this component. */
9355 if (modifier == EXPAND_CONST_ADDRESS
9356 || modifier == EXPAND_SUM
9357 || modifier == EXPAND_INITIALIZER)
9358 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9360 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9362 if (op0 == orig_op0)
9363 op0 = copy_rtx (op0);
9365 set_mem_attributes (op0, treeop0, 0);
9366 if (REG_P (XEXP (op0, 0)))
9367 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9369 MEM_VOLATILE_P (op0) |= volatilep;
9375 op0 = expand_expr (treeop0,
9376 NULL_RTX, VOIDmode, modifier);
9378 /* If the input and output modes are both the same, we are done. */
9379 if (mode == GET_MODE (op0))
9381 /* If neither mode is BLKmode, and both modes are the same size
9382 then we can use gen_lowpart. */
9383 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9384 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9385 && !COMPLEX_MODE_P (GET_MODE (op0)))
9387 if (GET_CODE (op0) == SUBREG)
9388 op0 = force_reg (GET_MODE (op0), op0);
9389 temp = gen_lowpart_common (mode, op0);
9394 if (!REG_P (op0) && !MEM_P (op0))
9395 op0 = force_reg (GET_MODE (op0), op0);
9396 op0 = gen_lowpart (mode, op0);
9399 /* If both types are integral, convert from one mode to the other. */
9400 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9401 op0 = convert_modes (mode, GET_MODE (op0), op0,
9402 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9403 /* As a last resort, spill op0 to memory, and reload it in a
9405 else if (!MEM_P (op0))
9407 /* If the operand is not a MEM, force it into memory. Since we
9408 are going to be changing the mode of the MEM, don't call
9409 force_const_mem for constants because we don't allow pool
9410 constants to change mode. */
9411 tree inner_type = TREE_TYPE (treeop0);
9413 gcc_assert (!TREE_ADDRESSABLE (exp));
9415 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9417 = assign_stack_temp_for_type
9418 (TYPE_MODE (inner_type),
9419 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9421 emit_move_insn (target, op0);
9425 /* At this point, OP0 is in the correct mode. If the output type is
9426 such that the operand is known to be aligned, indicate that it is.
9427 Otherwise, we need only be concerned about alignment for non-BLKmode
9431 op0 = copy_rtx (op0);
9433 if (TYPE_ALIGN_OK (type))
9434 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9435 else if (STRICT_ALIGNMENT
9437 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9439 tree inner_type = TREE_TYPE (treeop0);
9440 HOST_WIDE_INT temp_size
9441 = MAX (int_size_in_bytes (inner_type),
9442 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9444 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9445 rtx new_with_op0_mode
9446 = adjust_address (new_rtx, GET_MODE (op0), 0);
9448 gcc_assert (!TREE_ADDRESSABLE (exp));
9450 if (GET_MODE (op0) == BLKmode)
9451 emit_block_move (new_with_op0_mode, op0,
9452 GEN_INT (GET_MODE_SIZE (mode)),
9453 (modifier == EXPAND_STACK_PARM
9454 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9456 emit_move_insn (new_with_op0_mode, op0);
9461 op0 = adjust_address (op0, mode, 0);
9466 /* Use a compare and a jump for BLKmode comparisons, or for function
9467 type comparisons if HAVE_canonicalize_funcptr_for_compare is defined. */
9469 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9470 are occasionally created by folding during expansion. */
9471 case TRUTH_ANDIF_EXPR:
9472 case TRUTH_ORIF_EXPR:
9475 || modifier == EXPAND_STACK_PARM
9476 || ! safe_from_p (target, treeop0, 1)
9477 || ! safe_from_p (target, treeop1, 1)
9478 /* Make sure we don't have a hard reg (such as function's return
9479 value) live across basic blocks, if not optimizing. */
9480 || (!optimize && REG_P (target)
9481 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9482 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9485 emit_move_insn (target, const0_rtx);
9487 op1 = gen_label_rtx ();
9488 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9491 emit_move_insn (target, const1_rtx);
9494 return ignore ? const0_rtx : target;
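/* The control flow emitted above amounts to, roughly:

     target = 0;
     if (!(op0 OP op1)) goto L;
     target = 1;
   L:

   producing a 0/1 value without a store-flag instruction. A sketch
   of the emitted sequence, not literal RTL. */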
9496 case STATEMENT_LIST:
9498 tree_stmt_iterator iter;
9500 gcc_assert (ignore);
9502 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9503 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9508 /* A COND_EXPR with its type being VOID_TYPE represents a
9509 conditional jump and is handled in
9510 expand_gimple_cond_expr. */
9511 gcc_assert (!VOID_TYPE_P (type));
9513 /* Note that COND_EXPRs whose type is a structure or union
9514 are required to be constructed to contain assignments of
9515 a temporary variable, so that we can evaluate them here
9516 for side effect only. If type is void, we must do likewise. */
9518 gcc_assert (!TREE_ADDRESSABLE (type)
9520 && TREE_TYPE (treeop1) != void_type_node
9521 && TREE_TYPE (treeop2) != void_type_node);
9523 /* If we are not to produce a result, we have no target. Otherwise,
9524 if a target was specified, use it; it will not be used as an
9525 intermediate target unless it is safe. If no target, use a
9528 if (modifier != EXPAND_STACK_PARM
9530 && safe_from_p (original_target, treeop0, 1)
9531 && GET_MODE (original_target) == mode
9532 #ifdef HAVE_conditional_move
9533 && (! can_conditionally_move_p (mode)
9534 || REG_P (original_target))
9536 && !MEM_P (original_target))
9537 temp = original_target;
9539 temp = assign_temp (type, 0, 0, 1);
9541 do_pending_stack_adjust ();
9543 op0 = gen_label_rtx ();
9544 op1 = gen_label_rtx ();
9545 jumpifnot (treeop0, op0, -1);
9546 store_expr (treeop1, temp,
9547 modifier == EXPAND_STACK_PARM,
9550 emit_jump_insn (gen_jump (op1));
9553 store_expr (treeop2, temp,
9554 modifier == EXPAND_STACK_PARM,
9562 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9569 gcc_assert (ignore);
9571 /* Check for |= or &= of a bitfield of size one into another bitfield
9572 of size 1. In this case, (unless we need the result of the
9573 assignment) we can do this more efficiently with a
9574 test followed by an assignment, if necessary.
9576 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9577 things change so we do, this code should be enhanced to
9579 if (TREE_CODE (lhs) == COMPONENT_REF
9580 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9581 || TREE_CODE (rhs) == BIT_AND_EXPR)
9582 && TREE_OPERAND (rhs, 0) == lhs
9583 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9584 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9585 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9587 rtx label = gen_label_rtx ();
9588 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9589 do_jump (TREE_OPERAND (rhs, 1),
9591 value ? 0 : label, -1);
9592 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9593 MOVE_NONTEMPORAL (exp));
9594 do_pending_stack_adjust ();
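/* Net effect of the bit-test path above for "lhs |= rhs.bit": test
   the source bit and skip the store when it is clear, so only a test
   and a conditional store of 1 are emitted instead of a full
   read-modify-write. (For &=, the store of 0 happens when the bit
   is clear.) */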
9599 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9604 return expand_expr_addr_expr (exp, target, tmode, modifier);
9607 op0 = expand_normal (treeop0);
9608 return read_complex_part (op0, false);
9611 op0 = expand_normal (treeop0);
9612 return read_complex_part (op0, true);
9619 /* Expanded in cfgexpand.c. */
9622 case TRY_CATCH_EXPR:
9624 case EH_FILTER_EXPR:
9625 case TRY_FINALLY_EXPR:
9626 /* Lowered by tree-eh.c. */
9629 case WITH_CLEANUP_EXPR:
9630 case CLEANUP_POINT_EXPR:
9632 case CASE_LABEL_EXPR:
9638 case PREINCREMENT_EXPR:
9639 case PREDECREMENT_EXPR:
9640 case POSTINCREMENT_EXPR:
9641 case POSTDECREMENT_EXPR:
9644 /* Lowered by gimplify.c. */
9648 /* Function descriptors are not valid except as
9649 initialization constants, and should not be expanded. */
9652 case WITH_SIZE_EXPR:
9653 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9654 have pulled out the size to use in whatever context it needed. */
9655 return expand_expr_real (treeop0, original_target, tmode,
9658 case REALIGN_LOAD_EXPR:
9660 tree oprnd0 = treeop0;
9661 tree oprnd1 = treeop1;
9662 tree oprnd2 = treeop2;
9665 this_optab = optab_for_tree_code (code, type, optab_default);
9666 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9667 op2 = expand_normal (oprnd2);
9668 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9676 tree oprnd0 = treeop0;
9677 tree oprnd1 = treeop1;
9678 tree oprnd2 = treeop2;
9681 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9682 op2 = expand_normal (oprnd2);
9683 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9688 case COMPOUND_LITERAL_EXPR:
9690 /* Initialize the anonymous variable declared in the compound
9691 literal, then return the variable. */
9692 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9694 /* Create RTL for this variable. */
9695 if (!DECL_RTL_SET_P (decl))
9697 if (DECL_HARD_REGISTER (decl))
9698 /* The user specified an assembler name for this variable.
9700 rest_of_decl_compilation (decl, 0, 0);
9705 return expand_expr_real (decl, original_target, tmode,
9710 return expand_expr_real_2 (&ops, target, tmode, modifier);
9714 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9715 signedness of TYPE), possibly returning the result in TARGET. */
9717 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9719 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9720 if (target && GET_MODE (target) != GET_MODE (exp))
9722 /* For constant values, reduce using build_int_cst_type. */
9723 if (CONST_INT_P (exp))
9725 HOST_WIDE_INT value = INTVAL (exp);
9726 tree t = build_int_cst_type (type, value);
9727 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9729 else if (TYPE_UNSIGNED (type))
9731 rtx mask = immed_double_int_const (double_int_mask (prec),
9733 return expand_and (GET_MODE (exp), exp, mask, target);
9737 tree count = build_int_cst (NULL_TREE,
9738 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9739 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9740 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
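/* A worked example (illustrative only): reducing the SImode value
   0xFFF to a 4-bit type gives

     unsigned: 0xFFF & ((1 << 4) - 1)                 == 0xF
     signed:   ((0xFFF << 28) >> 28) arithmetically   == -1

   matching the mask and shift-pair paths above. */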
9744 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9745 when applied to the address of EXP produces an address known to be
9746 aligned more than BIGGEST_ALIGNMENT. */
9749 is_aligning_offset (const_tree offset, const_tree exp)
9751 /* Strip off any conversions. */
9752 while (CONVERT_EXPR_P (offset))
9753 offset = TREE_OPERAND (offset, 0);
9755 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9756 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9757 if (TREE_CODE (offset) != BIT_AND_EXPR
9758 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9759 || compare_tree_int (TREE_OPERAND (offset, 1),
9760 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9761 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9764 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9765 It must be NEGATE_EXPR. Then strip any more conversions. */
9766 offset = TREE_OPERAND (offset, 0);
9767 while (CONVERT_EXPR_P (offset))
9768 offset = TREE_OPERAND (offset, 0);
9770 if (TREE_CODE (offset) != NEGATE_EXPR)
9773 offset = TREE_OPERAND (offset, 0);
9774 while (CONVERT_EXPR_P (offset))
9775 offset = TREE_OPERAND (offset, 0);
9777 /* This must now be the address of EXP. */
9778 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
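/* A hypothetical offset matching the shape tested above is

     (-(intptr_t) &exp) & (ALIGN - 1)

   where ALIGN is a power of two larger than BIGGEST_ALIGNMENT: a
   BIT_AND_EXPR whose first operand negates the address of EXP. */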
9781 /* Return the tree node if an ARG corresponds to a string constant or zero
9782 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9783 in bytes within the string that ARG is accessing. The type of the
9784 offset will be `sizetype'. */
9787 string_constant (tree arg, tree *ptr_offset)
9789 tree array, offset, lower_bound;
9792 if (TREE_CODE (arg) == ADDR_EXPR)
9794 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9796 *ptr_offset = size_zero_node;
9797 return TREE_OPERAND (arg, 0);
9799 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9801 array = TREE_OPERAND (arg, 0);
9802 offset = size_zero_node;
9804 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9806 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9807 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9808 if (TREE_CODE (array) != STRING_CST
9809 && TREE_CODE (array) != VAR_DECL)
9812 /* Check if the array has a nonzero lower bound. */
9813 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9814 if (!integer_zerop (lower_bound))
9816 /* If the offset and base aren't both constants, return 0. */
9817 if (TREE_CODE (lower_bound) != INTEGER_CST)
9819 if (TREE_CODE (offset) != INTEGER_CST)
9821 /* Adjust offset by the lower bound. */
9822 offset = size_diffop (fold_convert (sizetype, offset),
9823 fold_convert (sizetype, lower_bound));
9829 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9831 tree arg0 = TREE_OPERAND (arg, 0);
9832 tree arg1 = TREE_OPERAND (arg, 1);
9837 if (TREE_CODE (arg0) == ADDR_EXPR
9838 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9839 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9841 array = TREE_OPERAND (arg0, 0);
9844 else if (TREE_CODE (arg1) == ADDR_EXPR
9845 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9846 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9848 array = TREE_OPERAND (arg1, 0);
9857 if (TREE_CODE (array) == STRING_CST)
9859 *ptr_offset = fold_convert (sizetype, offset);
9862 else if (TREE_CODE (array) == VAR_DECL
9863 || TREE_CODE (array) == CONST_DECL)
9867 /* Variables initialized to string literals can be handled too. */
9868 if (!const_value_known_p (array)
9869 || !DECL_INITIAL (array)
9870 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9873 /* Avoid const char foo[4] = "abcde"; */
9874 if (DECL_SIZE_UNIT (array) == NULL_TREE
9875 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9876 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9877 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9880 /* If the variable is bigger than the string literal, OFFSET must be
9881 constant and within the bounds of the string literal. */
9882 offset = fold_convert (sizetype, offset);
9883 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9884 && (! host_integerp (offset, 1)
9885 || compare_tree_int (offset, length) >= 0))
9888 *ptr_offset = offset;
9889 return DECL_INITIAL (array);
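/* For example, given "static const char buf[] = \"hello\";", calling
   this function with ARG "&buf[2]" returns the STRING_CST "hello"
   and sets *PTR_OFFSET to 2. An illustrative use, not original
   commentary. */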
9895 /* Generate code to calculate OPS, an exploded comparison expression,
9896 using a store-flag instruction, and return an rtx for the result.
9897 OPS reflects a comparison.
9899 If TARGET is nonzero, store the result there if convenient.
9901 Return zero if there is no suitable set-flag instruction
9902 available on this machine.
9904 Once expand_expr has been called on the arguments of the comparison,
9905 we are committed to doing the store flag, since it is not safe to
9906 re-evaluate the expression. We emit the store-flag insn by calling
9907 emit_store_flag, but only expand the arguments if we have a reason
9908 to believe that emit_store_flag will be successful. If we think that
9909 it will, but it isn't, we have to simulate the store-flag with a
9910 set/jump/set sequence. */
9913 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9916 tree arg0, arg1, type;
9918 enum machine_mode operand_mode;
9921 rtx subtarget = target;
9922 location_t loc = ops->location;
9927 /* Don't crash if the comparison was erroneous. */
9928 if (arg0 == error_mark_node || arg1 == error_mark_node)
9931 type = TREE_TYPE (arg0);
9932 operand_mode = TYPE_MODE (type);
9933 unsignedp = TYPE_UNSIGNED (type);
9935 /* We won't bother with BLKmode store-flag operations because it would mean
9936 passing a lot of information to emit_store_flag. */
9937 if (operand_mode == BLKmode)
9940 /* We won't bother with store-flag operations involving function pointers
9941 when function pointers must be canonicalized before comparisons. */
9942 #ifdef HAVE_canonicalize_funcptr_for_compare
9943 if (HAVE_canonicalize_funcptr_for_compare
9944 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9945 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9947 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9948 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9949 == FUNCTION_TYPE))))
9956 /* Get the rtx comparison code to use. We know that EXP is a comparison
9957 operation of some type. Some comparisons against 1 and -1 can be
9958 converted to comparisons with zero. Do so here so that the tests
9959 below will be aware that we have a comparison with zero. These
9960 tests will not catch constants in the first operand, but constants
9961 are rarely passed as the first operand. */
9972 if (integer_onep (arg1))
9973 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9975 code = unsignedp ? LTU : LT;
9978 if (! unsignedp && integer_all_onesp (arg1))
9979 arg1 = integer_zero_node, code = LT;
9981 code = unsignedp ? LEU : LE;
9984 if (! unsignedp && integer_all_onesp (arg1))
9985 arg1 = integer_zero_node, code = GE;
9987 code = unsignedp ? GTU : GT;
9990 if (integer_onep (arg1))
9991 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9993 code = unsignedp ? GEU : GE;
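/* E.g. the signed test "x < 1" becomes "x <= 0" and "x >= 1" becomes
   "x > 0", so the zero-comparison forms are what emit_store_flag
   ultimately sees. Illustrative examples of the rewrites above. */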
9996 case UNORDERED_EXPR:
10022 gcc_unreachable ();
10025 /* Put a constant second. */
10026 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10027 || TREE_CODE (arg0) == FIXED_CST)
10029 tem = arg0; arg0 = arg1; arg1 = tem;
10030 code = swap_condition (code);
10033 /* If this is an equality or inequality test of a single bit, we can
10034 do this by shifting the bit being tested to the low-order bit and
10035 masking the result with the constant 1. If the condition was EQ,
10036 we xor it with 1. This does not require an scc insn and is faster
10037 than an scc insn even if we have it.
10039 The code to make this transformation was moved into fold_single_bit_test,
10040 so we just call into the folder and expand its result. */
10042 if ((code == NE || code == EQ)
10043 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10044 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10046 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10047 return expand_expr (fold_single_bit_test (loc,
10048 code == NE ? NE_EXPR : EQ_EXPR,
10050 target, VOIDmode, EXPAND_NORMAL);
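/* For example, "(x & 8) != 0" becomes "(x >> 3) & 1", and the EQ
   variant additionally XORs the result with 1 -- cheaper than a
   store-flag insn even when one exists. */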
10053 if (! get_subtarget (target)
10054 || GET_MODE (subtarget) != operand_mode)
10057 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10060 target = gen_reg_rtx (mode);
10062 /* Try a cstore if possible. */
10063 return emit_store_flag_force (target, code, op0, op1,
10064 operand_mode, unsignedp, 1);
10068 /* Stubs in case we haven't got a casesi insn. */
10069 #ifndef HAVE_casesi
10070 # define HAVE_casesi 0
10071 # define gen_casesi(a, b, c, d, e) (0)
10072 # define CODE_FOR_casesi CODE_FOR_nothing
10075 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10076 0 otherwise (i.e. if there is no casesi instruction). */
10078 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10079 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10080 rtx fallback_label ATTRIBUTE_UNUSED)
10082 enum machine_mode index_mode = SImode;
10083 int index_bits = GET_MODE_BITSIZE (index_mode);
10084 rtx op1, op2, index;
10085 enum machine_mode op_mode;
10090 /* Convert the index to SImode. */
10091 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10093 enum machine_mode omode = TYPE_MODE (index_type);
10094 rtx rangertx = expand_normal (range);
10096 /* We must handle the endpoints in the original mode. */
10097 index_expr = build2 (MINUS_EXPR, index_type,
10098 index_expr, minval);
10099 minval = integer_zero_node;
10100 index = expand_normal (index_expr);
10102 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10103 omode, 1, default_label);
10104 /* Now we can safely truncate. */
10105 index = convert_to_mode (index_mode, index, 0);
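/* E.g. with a DImode index on a 32-bit target: the subtraction of
   MINVAL and the range check are done in DImode, so any index that
   passes is known to fit in SImode and the truncation above is safe.
   A descriptive note. */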
10109 if (TYPE_MODE (index_type) != index_mode)
10111 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10112 index_expr = fold_convert (index_type, index_expr);
10115 index = expand_normal (index_expr);
10118 do_pending_stack_adjust ();
10120 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10121 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10123 index = copy_to_mode_reg (op_mode, index);
10125 op1 = expand_normal (minval);
10127 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10128 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10129 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10130 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10132 op1 = copy_to_mode_reg (op_mode, op1);
10134 op2 = expand_normal (range);
10136 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10137 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10138 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10139 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10141 op2 = copy_to_mode_reg (op_mode, op2);
10143 emit_jump_insn (gen_casesi (index, op1, op2,
10144 table_label, !default_label
10145 ? fallback_label : default_label));
10149 /* Attempt to generate a tablejump instruction; same concept. */
10150 #ifndef HAVE_tablejump
10151 #define HAVE_tablejump 0
10152 #define gen_tablejump(x, y) (0)
10155 /* Subroutine of the next function.
10157 INDEX is the value being switched on, with the lowest value
10158 in the table already subtracted.
10159 MODE is its expected mode (needed if INDEX is constant).
10160 RANGE is the length of the jump table.
10161 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10163 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10164 index value is out of range. */
10167 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10172 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10173 cfun->cfg->max_jumptable_ents = INTVAL (range);
10175 /* Do an unsigned comparison (in the proper mode) between the index
10176 expression and the value which represents the length of the range.
10177 Since we just finished subtracting the lower bound of the range
10178 from the index expression, this comparison allows us to simultaneously
10179 check that the original index expression value is both greater than
10180 or equal to the minimum value of the range and less than or equal to
10181 the maximum value of the range. */
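/* Concretely (an illustrative sketch): for a switch over case values
   3..7, INDEX here is i - 3 and RANGE is 4. If i was 2, i - 3 wraps
   to a huge unsigned value, so the single GTU test below also rejects
   values under the original minimum. */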
10184 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10187 /* If index is in range, it must fit in Pmode.
10188 Convert to Pmode so we can index with it. */
10190 index = convert_to_mode (Pmode, index, 1);
10192 /* Don't let a MEM slip through, because then INDEX that comes
10193 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10194 and break_out_memory_refs will go to work on it and mess it up. */
10195 #ifdef PIC_CASE_VECTOR_ADDRESS
10196 if (flag_pic && !REG_P (index))
10197 index = copy_to_mode_reg (Pmode, index);
10200 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10201 GET_MODE_SIZE, because this indicates how large insns are. The other
10202 uses should all be Pmode, because they are addresses. This code
10203 could fail if addresses and insns are not the same size. */
10204 index = gen_rtx_PLUS (Pmode,
10205 gen_rtx_MULT (Pmode, index,
10206 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10207 gen_rtx_LABEL_REF (Pmode, table_label));
10208 #ifdef PIC_CASE_VECTOR_ADDRESS
10210 index = PIC_CASE_VECTOR_ADDRESS (index);
10213 index = memory_address (CASE_VECTOR_MODE, index);
10214 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10215 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10216 convert_move (temp, vector, 0);
10218 emit_jump_insn (gen_tablejump (temp, table_label));
10220 /* If we are generating PIC code or if the table is PC-relative, the
10221 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10222 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10227 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10228 rtx table_label, rtx default_label)
10232 if (! HAVE_tablejump)
10235 index_expr = fold_build2 (MINUS_EXPR, index_type,
10236 fold_convert (index_type, index_expr),
10237 fold_convert (index_type, minval));
10238 index = expand_normal (index_expr);
10239 do_pending_stack_adjust ();
10241 do_tablejump (index, TYPE_MODE (index_type),
10242 convert_modes (TYPE_MODE (index_type),
10243 TYPE_MODE (TREE_TYPE (range)),
10244 expand_normal (range),
10245 TYPE_UNSIGNED (TREE_TYPE (range))),
10246 table_label, default_label);
10250 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10252 const_vector_from_tree (tree exp)
10257 enum machine_mode inner, mode;
10259 mode = TYPE_MODE (TREE_TYPE (exp));
10261 if (initializer_zerop (exp))
10262 return CONST0_RTX (mode);
10264 units = GET_MODE_NUNITS (mode);
10265 inner = GET_MODE_INNER (mode);
10267 v = rtvec_alloc (units);
10269 link = TREE_VECTOR_CST_ELTS (exp);
10270 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10272 elt = TREE_VALUE (link);
10274 if (TREE_CODE (elt) == REAL_CST)
10275 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10277 else if (TREE_CODE (elt) == FIXED_CST)
10278 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10281 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10285 /* Initialize remaining elements to 0. */
10286 for (; i < units; ++i)
10287 RTVEC_ELT (v, i) = CONST0_RTX (inner);
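/* E.g. a V4SImode VECTOR_CST listing only the elements {1, 2}
   produces the CONST_VECTOR [1, 2, 0, 0]: trailing elements default
   to zero here. */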
10289 return gen_rtx_CONST_VECTOR (mode, v);
10293 /* Build a decl for an EH personality function named NAME. */
10296 build_personality_function (const char *name)
10300 type = build_function_type_list (integer_type_node, integer_type_node,
10301 long_long_unsigned_type_node,
10302 ptr_type_node, ptr_type_node, NULL_TREE);
10303 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10304 get_identifier (name), type);
10305 DECL_ARTIFICIAL (decl) = 1;
10306 DECL_EXTERNAL (decl) = 1;
10307 TREE_PUBLIC (decl) = 1;
10309 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10310 are the flags assigned by targetm.encode_section_info. */
10311 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10316 /* Extracts the personality function of DECL and returns the corresponding
10320 get_personality_function (tree decl)
10322 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10323 enum eh_personality_kind pk;
10325 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10326 if (pk == eh_personality_none)
10330 && pk == eh_personality_any)
10331 personality = lang_hooks.eh_personality ();
10333 if (pk == eh_personality_lang)
10334 gcc_assert (personality != NULL_TREE);
10336 return XEXP (DECL_RTL (personality), 0);
10339 #include "gt-expr.h"