/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "ssaexpand.h"
#include "target-globals.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */
#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
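
/* Concretely (an illustrative reading, not target documentation): on a
   machine whose stack grows downward while argument offsets grow upward,
   and which has push instructions, the condition above holds and the
   arguments are processed from last to first.  */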
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to, to_addr, from, from_addr;
  int autinc_to, explicit_inc_to, autinc_from, explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces_d
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned,
                                        HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree,
                                                              const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
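
/* For example, with purely illustrative values MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 5 when optimizing for speed, an aligned 16-byte copy
   needs two 8-byte moves (2 < 5) and is done by pieces, while a 64-byte
   copy needing eight moves is not.  */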
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
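
/* With this default, a strict-alignment target treats every unaligned
   access as slow, so alignment_for_piecewise_move below falls back to
   pieces narrow enough to be naturally aligned.  */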
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_REGNO (reg, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;

      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
                                                 from_mode),
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
              ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
         we won't saturate the result.
         Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
          && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
        expand_fixed_convert (to, from, 0, 0);
      else
        expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          rtx word_to = gen_reg_rtx (word_mode);
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_clobber (to);
            }
          convert_move (word_to, from, unsignedp);
          emit_unop_insn (code, to, word_to, equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        fill_value = emit_store_flag (gen_reg_rtx (word_mode),
                                      LT, lowfrom, const0_rtx,
                                      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
                             from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
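
/* A typical (illustrative) use: widening a QImode value X to SImode
   with zero-extension:

     rtx wide = convert_to_mode (SImode, x, 1);  */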
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x,
               int unsignedp)
{
  rtx temp;
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = uhwi_to_double_int (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
        val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }
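
  /* Example: with a 64-bit HOST_WIDE_INT, converting the negative SImode
     constant -1 to an unsigned 128-bit integer mode must produce
     0xffffffff rather than a sign-extended all-ones value; the
     zero-extension of VAL above achieves exactly that.  */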
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
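
/* Worked example for the CONST_INT widening path above: widening the
   QImode constant 0xff to SImode yields 255 when UNSIGNEDP is nonzero,
   but -1 (all bits set) when it is zero, because bit 7 of the QImode
   value is set.  */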
/* Return the largest alignment we can use for doing a move (or store)
   of MAX_PIECES.  ALIGN is the largest alignment we could use.  */

static unsigned int
alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
{
  enum machine_mode tmode;

  tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > max_pieces
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  return align;
}
/* Return the widest integer mode strictly narrower than SIZE bytes.
   If no such mode can be found, return VOIDmode.  */

static enum machine_mode
widest_int_mode_for_size (unsigned int size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < size)
      mode = tmode;

  return mode;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
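
/* For instance (illustrative, configuration-dependent numbers): on a
   host with a 64-bit HOST_WIDE_INT and a target whose MOVE_MAX_PIECES
   is 8, STORE_MAX_PIECES is MIN (8, 16) == 8 bytes.  */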
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode, from_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...
         MODE might not be used depending on the definitions of the
         USE_* macros below.  */
      enum machine_mode mode ATTRIBUTE_UNUSED
        = widest_int_mode_for_size (max_size);

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode,
                                             plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode,
                                           plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_to_mode_reg (to_addr_mode,
                                                 plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;

  align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);

  while (max_size > 1)
    {
      enum machine_mode mode;
      enum insn_code icode;

      mode = widest_int_mode_for_size (max_size);

      if (mode == VOIDmode)
        break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
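
/* A worked example (assuming QI/HI/SImode moves are all available and
   the alignment permits them): for L == 11 with MOVE_MAX_PIECES == 4,
   the loop counts two SImode moves, one HImode move and one QImode
   move, so it returns 4.  */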
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
                       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
                                       expected_align, expected_size))
    ;
  else if (may_use_call
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
           && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
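
/* A typical (illustrative) call copies NBYTES bytes between two BLKmode
   MEMs, letting the routine pick the strategy:

     emit_block_move (dst_mem, src_mem, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   DST_MEM, SRC_MEM and NBYTES here are hypothetical names.  */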
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
                                              NULL_TREE, true);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
          return false;
        targetm.calls.function_arg_advance (&args_so_far, mode,
                                            NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
                            unsigned int expected_align,
                            HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((CONST_INT_P (size)
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          if (insn_data[(int) code].n_operands == 4)
            pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          else
            pat = GEN_FCN ((int) code) (x, y, op2, opalign,
                                        GEN_INT (expected_align
                                                 / BITS_PER_UNIT),
                                        GEN_INT (expected_size));

          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
  enum machine_mode y_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
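
/* Schematically (an illustrative shape, not from a real target), a value
   passed in two DImode registers might be represented as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   and the clone built here replaces regs 3 and 4 with fresh pseudos of
   the same modes, keeping the byte offsets.  */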
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, false, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, false, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
        {
          HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

          if (len == ssize)
            tmps[i] = src;
          else
            {
              rtx first, second;

              gcc_assert (2 * len == ssize);
              split_double (src, &first, &second);
              if (i)
                tmps[i] = second;
              else
                tmps[i] = first;
            }
        }
      else if (REG_P (src) && GET_MODE (src) == mode)
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + adj_bytelen
              <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              enum machine_mode dest_mode = GET_MODE (dest);
              enum machine_mode tmp_mode = GET_MODE (tmps[i]);

              gcc_assert (bytepos == 0 && XVECLEN (src, 0));

              if (GET_MODE_ALIGNMENT (dest_mode)
                  >= GET_MODE_ALIGNMENT (tmp_mode))
                {
                  dest = assign_stack_temp (dest_mode,
                                            GET_MODE_SIZE (dest_mode),
                                            0);
                  emit_move_insn (adjust_address (dest,
                                                  tmp_mode,
                                                  bytepos),
                                  tmps[i]);
                  dst = dest;
                }
              else
                {
                  dest = assign_stack_temp (tmp_mode,
                                            GET_MODE_SIZE (tmp_mode),
                                            0);
                  emit_move_insn (dest, tmps[i]);
                  dst = adjust_address (dest, dest_mode, bytepos);
                }
              break;
            }
        }

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = adj_bytelen;
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
2088 /* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2092 The purpose of this routine is to handle functions that return
2093 BLKmode structures in registers. Some machines (the PA for example)
2094 want to return all small structures in registers regardless of the
2095 structure's alignment. */
2098 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2100 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2101 rtx src = NULL, dst = NULL;
2102 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2103 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2104 enum machine_mode copy_mode;
2108 tgtblk = assign_temp (build_qualified_type (type,
2110 | TYPE_QUAL_CONST)),
2112 preserve_temp_slots (tgtblk);
2115 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2116 into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2122 /* If the structure doesn't take up a whole number of words, see whether
2123 SRCREG is padded on the left or on the right. If it's on the left,
2124 set PADDING_CORRECTION to the number of bits to skip.
2126 In most ABIs, the structure will be returned at the least significant end of
2127 the register, which translates to right padding on little-endian
2128 targets and left padding on big-endian targets. The opposite
2129 holds if the structure is returned at the most significant
2130 end of the register. */
2131 if (bytes % UNITS_PER_WORD != 0
2132 && (targetm.calls.return_in_msb (type)
2134 : BYTES_BIG_ENDIAN))
2136 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2138 /* Copy the structure BITSIZE bits at a time. If the target lives in
2139 memory, take care of not reading/writing past its end by selecting
2140 a copy mode suited to BITSIZE. This should always be possible given
2143 We could probably emit more efficient code for machines which do not use
2144 strict alignment, but it doesn't seem worth the effort at the current time.  */
2147 copy_mode = word_mode;
2150 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2151 if (mem_mode != BLKmode)
2152 copy_mode = mem_mode;
2155 for (bitpos = 0, xbitpos = padding_correction;
2156 bitpos < bytes * BITS_PER_UNIT;
2157 bitpos += bitsize, xbitpos += bitsize)
2159 /* We need a new source operand each time xbitpos is on a
2160 word boundary and when xbitpos == padding_correction
2161 (the first time through). */
2162 if (xbitpos % BITS_PER_WORD == 0
2163 || xbitpos == padding_correction)
2164 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2167 /* We need a new destination operand each time bitpos is on a word boundary.  */
2169 if (bitpos % BITS_PER_WORD == 0)
2170 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2172 /* Use xbitpos for the source extraction (right justified) and
2173 bitpos for the destination store (left justified). */
2174 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2175 extract_bit_field (src, bitsize,
2176 xbitpos % BITS_PER_WORD, 1, false,
2177 NULL_RTX, copy_mode, copy_mode));
2183 /* Add a USE expression for REG to the (possibly empty) list pointed
2184 to by CALL_FUSAGE. REG must denote a hard register. */
2187 use_reg (rtx *call_fusage, rtx reg)
2189 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2192 = gen_rtx_EXPR_LIST (VOIDmode,
2193 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2196 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2197 starting at REGNO. All of these registers must be hard registers. */
2200 use_regs (rtx *call_fusage, int regno, int nregs)
2204 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2206 for (i = 0; i < nregs; i++)
2207 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2210 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2211 PARALLEL REGS. This is for calls that pass values in multiple
2212 non-contiguous locations. The Irix 6 ABI has examples of this. */
2215 use_group_regs (rtx *call_fusage, rtx regs)
2219 for (i = 0; i < XVECLEN (regs, 0); i++)
2221 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2223 /* A NULL entry means the parameter goes both on the stack and in
2224 registers. This can also be a MEM for targets that pass values
2225 partially on the stack and partially in registers. */
2226 if (reg != 0 && REG_P (reg))
2227 use_reg (call_fusage, reg);
2231 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2232 assignment and the code of the expression on the RHS is CODE.  Return NULL otherwise.  */
2236 get_def_for_expr (tree name, enum tree_code code)
2240 if (TREE_CODE (name) != SSA_NAME)
2243 def_stmt = get_gimple_for_ssa_name (name);
2245 || gimple_assign_rhs_code (def_stmt) != code)
2252 /* Determine whether the LEN bytes generated by CONSTFUN can be
2253 stored to memory using several move instructions. CONSTFUNDATA is
2254 a pointer which will be passed as argument in every CONSTFUN call.
2255 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2256 a memset operation and false if it's a copy of a constant string.
2257 Return nonzero if a call to store_by_pieces should succeed. */
2260 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2261 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2262 void *constfundata, unsigned int align, bool memsetp)
2264 unsigned HOST_WIDE_INT l;
2265 unsigned int max_size;
2266 HOST_WIDE_INT offset = 0;
2267 enum machine_mode mode;
2268 enum insn_code icode;
2270 /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it. */
2271 rtx cst ATTRIBUTE_UNUSED;
2277 ? SET_BY_PIECES_P (len, align)
2278 : STORE_BY_PIECES_P (len, align)))
2281 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2283 /* We would first store what we can in the largest integer mode, then go to
2284 successively smaller modes. */
2287 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2291 max_size = STORE_MAX_PIECES + 1;
2292 while (max_size > 1)
2294 mode = widest_int_mode_for_size (max_size);
2296 if (mode == VOIDmode)
2299 icode = optab_handler (mov_optab, mode);
2300 if (icode != CODE_FOR_nothing
2301 && align >= GET_MODE_ALIGNMENT (mode))
2303 unsigned int size = GET_MODE_SIZE (mode);
2310 cst = (*constfun) (constfundata, offset, mode);
2311 if (!LEGITIMATE_CONSTANT_P (cst))
2321 max_size = GET_MODE_SIZE (mode);
2324 /* The code above should have handled everything. */
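/* Worked example: with a hypothetical STORE_MAX_PIECES of 8 and
   sufficient alignment, LEN == 11 is decomposed greedily from the
   widest integer mode downwards:

       DImode (8 bytes): 11 -> 3   (one piece, offset 0)
       SImode (4 bytes): 3         (no piece fits)
       HImode (2 bytes): 3 -> 1    (one piece, offset 8)
       QImode (1 byte):  1 -> 0    (one piece, offset 10)

   CONSTFUN is queried once per piece (and once per direction when
   both forward and reverse walks are possible), and every constant
   it returns must satisfy LEGITIMATE_CONSTANT_P for the predicate
   to answer yes.  */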
2331 /* Generate several move instructions to store LEN bytes generated by
2332 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2333 pointer which will be passed as argument in every CONSTFUN call.
2334 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2335 a memset operation and false if it's a copy of a constant string.
2336 If ENDP is 0, return TO; if ENDP is 1, return the memory at the end, a la
2337 mempcpy; and if ENDP is 2, return the memory at the end minus one byte, a la stpcpy.  */
2341 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2342 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2343 void *constfundata, unsigned int align, bool memsetp, int endp)
2345 enum machine_mode to_addr_mode
2346 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2347 struct store_by_pieces_d data;
2351 gcc_assert (endp != 2);
2356 ? SET_BY_PIECES_P (len, align)
2357 : STORE_BY_PIECES_P (len, align));
2358 data.constfun = constfun;
2359 data.constfundata = constfundata;
2362 store_by_pieces_1 (&data, align);
2367 gcc_assert (!data.reverse);
2372 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2373 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2375 data.to_addr = copy_to_mode_reg (to_addr_mode,
2376 plus_constant (data.to_addr,
2379 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2386 to1 = adjust_address (data.to, QImode, data.offset);
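/* Usage note: the ENDP convention mirrors the C library.  A
   hypothetical builtin expander would call

       store_by_pieces (to, len, fun, data, align, memsetp, 0);   memcpy-style:  TO
       store_by_pieces (to, len, fun, data, align, memsetp, 1);   mempcpy-style: TO + LEN
       store_by_pieces (to, len, fun, data, align, memsetp, 2);   stpcpy-style:  TO + LEN - 1

   For ENDP == 2 the address of the last byte written is wanted,
   which is why the code above backs the auto-incremented address up
   by one before forming the result.  */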
2394 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2395 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2398 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2400 struct store_by_pieces_d data;
2405 data.constfun = clear_by_pieces_1;
2406 data.constfundata = NULL;
2409 store_by_pieces_1 (&data, align);
2412 /* Callback routine for clear_by_pieces.
2413 Return const0_rtx unconditionally. */
2416 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2417 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2418 enum machine_mode mode ATTRIBUTE_UNUSED)
2423 /* Subroutine of clear_by_pieces and store_by_pieces.
2424 Generate several move instructions to store LEN bytes of block TO. (A MEM
2425 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2428 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2429 unsigned int align ATTRIBUTE_UNUSED)
2431 enum machine_mode to_addr_mode
2432 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2433 rtx to_addr = XEXP (data->to, 0);
2434 unsigned int max_size = STORE_MAX_PIECES + 1;
2435 enum insn_code icode;
2438 data->to_addr = to_addr;
2440 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2441 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2443 data->explicit_inc_to = 0;
2445 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2447 data->offset = data->len;
2449 /* If storing requires more than two move insns,
2450 copy addresses to registers (to make displacements shorter)
2451 and use post-increment if available. */
2452 if (!data->autinc_to
2453 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2455 /* Determine the main mode we'll be using.
2456 MODE might not be used depending on the definitions of the
2457 USE_* macros below. */
2458 enum machine_mode mode ATTRIBUTE_UNUSED
2459 = widest_int_mode_for_size (max_size);
2461 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2463 data->to_addr = copy_to_mode_reg (to_addr_mode,
2464 plus_constant (to_addr, data->len));
2465 data->autinc_to = 1;
2466 data->explicit_inc_to = -1;
2469 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2470 && ! data->autinc_to)
2472 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2473 data->autinc_to = 1;
2474 data->explicit_inc_to = 1;
2477 if (!data->autinc_to && CONSTANT_P (to_addr))
2478 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2481 align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2483 /* First store what we can in the largest integer mode, then go to
2484 successively smaller modes. */
2486 while (max_size > 1)
2488 enum machine_mode mode = widest_int_mode_for_size (max_size);
2490 if (mode == VOIDmode)
2493 icode = optab_handler (mov_optab, mode);
2494 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2495 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2497 max_size = GET_MODE_SIZE (mode);
2500 /* The code above should have handled everything. */
2501 gcc_assert (!data->len);
2504 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2505 with move instructions for mode MODE. GENFUN is the gen_... function
2506 to make a move insn for that mode. DATA has all the other info. */
2509 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2510 struct store_by_pieces_d *data)
2512 unsigned int size = GET_MODE_SIZE (mode);
2515 while (data->len >= size)
2518 data->offset -= size;
2520 if (data->autinc_to)
2521 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2524 to1 = adjust_address (data->to, mode, data->offset);
2526 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2527 emit_insn (gen_add2_insn (data->to_addr,
2528 GEN_INT (-(HOST_WIDE_INT) size)));
2530 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2531 emit_insn ((*genfun) (to1, cst));
2533 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2534 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2536 if (! data->reverse)
2537 data->offset += size;
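/* Illustration of the offset bookkeeping above, storing four QImode
   pieces.  Forward (!reverse): each store uses OFFSET and then
   advances it, touching bytes 0, 1, 2, 3.  Reverse: OFFSET starts at
   LEN and is decremented first, touching bytes 3, 2, 1, 0.  When
   TO_ADDR was copied to a register by store_by_pieces_1, the
   explicit gen_add2_insn calls above step that register instead, so
   the MEM always addresses the current piece at displacement 0.  */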
2543 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2544 its length in bytes. */
2547 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2548 unsigned int expected_align, HOST_WIDE_INT expected_size)
2550 enum machine_mode mode = GET_MODE (object);
2553 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2555 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2556 just move a zero. Otherwise, do this a piece at a time. */
2558 && CONST_INT_P (size)
2559 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2561 rtx zero = CONST0_RTX (mode);
2564 emit_move_insn (object, zero);
2568 if (COMPLEX_MODE_P (mode))
2570 zero = CONST0_RTX (GET_MODE_INNER (mode));
2573 write_complex_part (object, zero, 0);
2574 write_complex_part (object, zero, 1);
2580 if (size == const0_rtx)
2583 align = MEM_ALIGN (object);
2585 if (CONST_INT_P (size)
2586 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2587 clear_by_pieces (object, INTVAL (size), align);
2588 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2589 expected_align, expected_size))
2591 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2592 return set_storage_via_libcall (object, size, const0_rtx,
2593 method == BLOCK_OP_TAILCALL);
2601 clear_storage (rtx object, rtx size, enum block_op_methods method)
2603 return clear_storage_hints (object, size, method, 0, -1);
2607 /* A subroutine of clear_storage. Expand a call to memset.
2608 Return the return value of memset, 0 otherwise. */
2611 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2613 tree call_expr, fn, object_tree, size_tree, val_tree;
2614 enum machine_mode size_mode;
2617 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2618 place those new pseudos into a VAR_DECL and use them later.  */
2620 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2622 size_mode = TYPE_MODE (sizetype);
2623 size = convert_to_mode (size_mode, size, 1);
2624 size = copy_to_mode_reg (size_mode, size);
2626 /* It is incorrect to use the libcall calling conventions to call
2627 memset in this context. This could be a user call to memset and
2628 the user may wish to examine the return value from memset. For
2629 targets where libcalls and normal calls have different conventions
2630 for returning pointers, we could end up generating incorrect code. */
2632 object_tree = make_tree (ptr_type_node, object);
2633 if (!CONST_INT_P (val))
2634 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2635 size_tree = make_tree (sizetype, size);
2636 val_tree = make_tree (integer_type_node, val);
2638 fn = clear_storage_libcall_fn (true);
2639 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2640 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2642 retval = expand_normal (call_expr);
2647 /* A subroutine of set_storage_via_libcall. Create the tree node
2648 for the function we use for block clears. The first time FOR_CALL
2649 is true, we call assemble_external. */
2651 tree block_clear_fn;
2654 init_block_clear_fn (const char *asmspec)
2656 if (!block_clear_fn)
2660 fn = get_identifier ("memset");
2661 args = build_function_type_list (ptr_type_node, ptr_type_node,
2662 integer_type_node, sizetype,
2665 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2666 DECL_EXTERNAL (fn) = 1;
2667 TREE_PUBLIC (fn) = 1;
2668 DECL_ARTIFICIAL (fn) = 1;
2669 TREE_NOTHROW (fn) = 1;
2670 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2671 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2673 block_clear_fn = fn;
2677 set_user_assembler_name (block_clear_fn, asmspec);
2681 clear_storage_libcall_fn (int for_call)
2683 static bool emitted_extern;
2685 if (!block_clear_fn)
2686 init_block_clear_fn (NULL);
2688 if (for_call && !emitted_extern)
2690 emitted_extern = true;
2691 make_decl_rtl (block_clear_fn);
2692 assemble_external (block_clear_fn);
2695 return block_clear_fn;
2698 /* Expand a setmem pattern; return true if successful. */
2701 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2702 unsigned int expected_align, HOST_WIDE_INT expected_size)
2704 /* Try the most limited insn first, because there's no point
2705 including more than one in the machine description unless
2706 the more limited one has some advantage. */
2708 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2709 enum machine_mode mode;
2711 if (expected_align < align)
2712 expected_align = align;
2714 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2715 mode = GET_MODE_WIDER_MODE (mode))
2717 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2718 insn_operand_predicate_fn pred;
2720 if (code != CODE_FOR_nothing
2721 /* We don't need MODE to be narrower than
2722 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2723 the mode mask, as it is returned by the macro, it will
2724 definitely be less than the actual mode mask. */
2725 && ((CONST_INT_P (size)
2726 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2727 <= (GET_MODE_MASK (mode) >> 1)))
2728 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2729 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2730 || (*pred) (object, BLKmode))
2731 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2732 || (*pred) (opalign, VOIDmode)))
2735 enum machine_mode char_mode;
2736 rtx last = get_last_insn ();
2739 opsize = convert_to_mode (mode, size, 1);
2740 pred = insn_data[(int) code].operand[1].predicate;
2741 if (pred != 0 && ! (*pred) (opsize, mode))
2742 opsize = copy_to_mode_reg (mode, opsize);
2745 char_mode = insn_data[(int) code].operand[2].mode;
2746 if (char_mode != VOIDmode)
2748 opchar = convert_to_mode (char_mode, opchar, 1);
2749 pred = insn_data[(int) code].operand[2].predicate;
2750 if (pred != 0 && ! (*pred) (opchar, char_mode))
2751 opchar = copy_to_mode_reg (char_mode, opchar);
2754 if (insn_data[(int) code].n_operands == 4)
2755 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2757 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2758 GEN_INT (expected_align
2760 GEN_INT (expected_size));
2767 delete_insns_since (last);
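/* Worked example for the size test above: a setmemhi pattern has
   GET_MODE_MASK (HImode) == 0xffff, so constant lengths up to
   0xffff >> 1 == 32767 are accepted for it directly; convert_to_mode
   would silently truncate anything larger, so bigger constants (and
   variable lengths) must match a pattern whose mode is at least
   BITS_PER_WORD wide.  The setmemhi name is hypothetical here; which
   modes exist depends on the machine description.  */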
2775 /* Write to one of the components of the complex value CPLX. Write VAL to
2776 the real part if IMAG_P is false, and the imaginary part if it's true.  */
2779 write_complex_part (rtx cplx, rtx val, bool imag_p)
2781 enum machine_mode cmode;
2782 enum machine_mode imode;
2785 if (GET_CODE (cplx) == CONCAT)
2787 emit_move_insn (XEXP (cplx, imag_p), val);
2791 cmode = GET_MODE (cplx);
2792 imode = GET_MODE_INNER (cmode);
2793 ibitsize = GET_MODE_BITSIZE (imode);
2795 /* For MEMs simplify_gen_subreg may generate an invalid new address
2796 because, e.g., the original address is considered mode-dependent
2797 by the target, which restricts simplify_subreg from invoking
2798 adjust_address_nv. Instead of preparing fallback support for an
2799 invalid address, we call adjust_address_nv directly. */
2802 emit_move_insn (adjust_address_nv (cplx, imode,
2803 imag_p ? GET_MODE_SIZE (imode) : 0),
2808 /* If the sub-object is at least word sized, then we know that subregging
2809 will work. This special case is important, since store_bit_field
2810 wants to operate on integer modes, and there's rarely an OImode to
2811 correspond to TCmode. */
2812 if (ibitsize >= BITS_PER_WORD
2813 /* For hard regs we have exact predicates. Assume we can split
2814 the original object if it spans an even number of hard regs.
2815 This special case is important for SCmode on 64-bit platforms
2816 where the natural size of floating-point regs is 32-bit. */
2818 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2819 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2821 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2822 imag_p ? GET_MODE_SIZE (imode) : 0);
2825 emit_move_insn (part, val);
2829 /* simplify_gen_subreg may fail for sub-word MEMs. */
2830 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2833 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
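/* Illustration: for SCmode (two SFmode parts of 4 bytes each) the
   real part lives at byte offset 0 and the imaginary part at
   GET_MODE_SIZE (SFmode) == 4; the bit-field fallback uses the same
   split, writing IBITSIZE == 32 bits at bit position 0 or 32.  A
   CONCAT never reaches this point because its two parts are already
   separate rtxes that can be moved directly.  */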
2836 /* Extract one of the components of the complex value CPLX. Extract the
2837 real part if IMAG_P is false, and the imaginary part if it's true. */
2840 read_complex_part (rtx cplx, bool imag_p)
2842 enum machine_mode cmode, imode;
2845 if (GET_CODE (cplx) == CONCAT)
2846 return XEXP (cplx, imag_p);
2848 cmode = GET_MODE (cplx);
2849 imode = GET_MODE_INNER (cmode);
2850 ibitsize = GET_MODE_BITSIZE (imode);
2852 /* Special case reads from complex constants that got spilled to memory. */
2853 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2855 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2856 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2858 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2859 if (CONSTANT_CLASS_P (part))
2860 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2864 /* For MEMs simplify_gen_subreg may generate an invalid new address
2865 because, e.g., the original address is considered mode-dependent
2866 by the target, which restricts simplify_subreg from invoking
2867 adjust_address_nv. Instead of preparing fallback support for an
2868 invalid address, we call adjust_address_nv directly. */
2870 return adjust_address_nv (cplx, imode,
2871 imag_p ? GET_MODE_SIZE (imode) : 0);
2873 /* If the sub-object is at least word sized, then we know that subregging
2874 will work. This special case is important, since extract_bit_field
2875 wants to operate on integer modes, and there's rarely an OImode to
2876 correspond to TCmode. */
2877 if (ibitsize >= BITS_PER_WORD
2878 /* For hard regs we have exact predicates. Assume we can split
2879 the original object if it spans an even number of hard regs.
2880 This special case is important for SCmode on 64-bit platforms
2881 where the natural size of floating-point regs is 32-bit. */
2883 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2884 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2886 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2887 imag_p ? GET_MODE_SIZE (imode) : 0);
2891 /* simplify_gen_subreg may fail for sub-word MEMs. */
2892 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2895 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2896 true, false, NULL_RTX, imode, imode);
2899 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2900 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2901 represented in NEW_MODE. If FORCE is true, this will never happen, as
2902 we'll force-create a SUBREG if needed. */
2905 emit_move_change_mode (enum machine_mode new_mode,
2906 enum machine_mode old_mode, rtx x, bool force)
2910 if (push_operand (x, GET_MODE (x)))
2912 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2913 MEM_COPY_ATTRIBUTES (ret, x);
2917 /* We don't have to worry about changing the address since the
2918 size in bytes is supposed to be the same. */
2919 if (reload_in_progress)
2921 /* Copy the MEM to change the mode and move any
2922 substitutions from the old MEM to the new one. */
2923 ret = adjust_address_nv (x, new_mode, 0);
2924 copy_replacements (x, ret);
2927 ret = adjust_address (x, new_mode, 0);
2931 /* Note that we do want simplify_subreg's behavior of validating
2932 that the new mode is ok for a hard register. If we were to use
2933 simplify_gen_subreg, we would create the subreg, but would
2934 probably run into the target not being able to implement it. */
2935 /* Except, of course, when FORCE is true, when this is exactly what
2936 we want. Which is needed for CCmodes on some targets. */
2938 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2940 ret = simplify_subreg (new_mode, x, old_mode, 0);
2946 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2947 an integer mode of the same size as MODE. Returns the instruction
2948 emitted, or NULL if such a move could not be generated. */
2951 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2953 enum machine_mode imode;
2954 enum insn_code code;
2956 /* There must exist a mode of the exact size we require. */
2957 imode = int_mode_for_mode (mode);
2958 if (imode == BLKmode)
2961 /* The target must support moves in this mode. */
2962 code = optab_handler (mov_optab, imode);
2963 if (code == CODE_FOR_nothing)
2966 x = emit_move_change_mode (imode, mode, x, force);
2969 y = emit_move_change_mode (imode, mode, y, force);
2972 return emit_insn (GEN_FCN (code) (x, y));
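/* Illustration: a target lacking a movsf pattern can still move
   SFmode values, because int_mode_for_mode (SFmode) yields SImode;
   both operands are rewritten as SImode views of the same bits (a
   subreg for registers, a mode-changed MEM for memory) and a single
   movsi performs the copy.  FORCE matters for cases such as CCmode,
   where plain simplify_subreg would refuse the conversion.  */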
2975 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2976 Return an equivalent MEM that does not use an auto-increment. */
2979 emit_move_resolve_push (enum machine_mode mode, rtx x)
2981 enum rtx_code code = GET_CODE (XEXP (x, 0));
2982 HOST_WIDE_INT adjust;
2985 adjust = GET_MODE_SIZE (mode);
2986 #ifdef PUSH_ROUNDING
2987 adjust = PUSH_ROUNDING (adjust);
2989 if (code == PRE_DEC || code == POST_DEC)
2991 else if (code == PRE_MODIFY || code == POST_MODIFY)
2993 rtx expr = XEXP (XEXP (x, 0), 1);
2996 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2997 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
2998 val = INTVAL (XEXP (expr, 1));
2999 if (GET_CODE (expr) == MINUS)
3001 gcc_assert (adjust == val || adjust == -val);
3005 /* Do not use anti_adjust_stack, since we don't want to update
3006 stack_pointer_delta. */
3007 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3008 GEN_INT (adjust), stack_pointer_rtx,
3009 0, OPTAB_LIB_WIDEN);
3010 if (temp != stack_pointer_rtx)
3011 emit_move_insn (stack_pointer_rtx, temp);
3018 temp = stack_pointer_rtx;
3023 temp = plus_constant (stack_pointer_rtx, -adjust);
3029 return replace_equiv_address (x, temp);
3032 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3033 X is known to satisfy push_operand, and MODE is known to be complex.
3034 Returns the last instruction emitted. */
3037 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3039 enum machine_mode submode = GET_MODE_INNER (mode);
3042 #ifdef PUSH_ROUNDING
3043 unsigned int submodesize = GET_MODE_SIZE (submode);
3045 /* In case we output to the stack, but the size is smaller than what the
3046 machine can push exactly, we need to use move instructions.  */
3047 if (PUSH_ROUNDING (submodesize) != submodesize)
3049 x = emit_move_resolve_push (mode, x);
3050 return emit_move_insn (x, y);
3054 /* Note that the real part always precedes the imag part in memory
3055 regardless of the machine's endianness.  */
3056 switch (GET_CODE (XEXP (x, 0)))
3070 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3071 read_complex_part (y, imag_first));
3072 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3073 read_complex_part (y, !imag_first));
3076 /* A subroutine of emit_move_complex. Perform the move from Y to X
3077 via two moves of the parts. Returns the last instruction emitted. */
3080 emit_move_complex_parts (rtx x, rtx y)
3082 /* Show the output dies here. This is necessary for SUBREGs
3083 of pseudos since we cannot track their lifetimes correctly;
3084 hard regs shouldn't appear here except as return values. */
3085 if (!reload_completed && !reload_in_progress
3086 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3089 write_complex_part (x, read_complex_part (y, false), false);
3090 write_complex_part (x, read_complex_part (y, true), true);
3092 return get_last_insn ();
3095 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3096 MODE is known to be complex. Returns the last instruction emitted. */
3099 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3103 /* Need to take special care for pushes, to maintain proper ordering
3104 of the data, and possibly extra padding. */
3105 if (push_operand (x, mode))
3106 return emit_move_complex_push (mode, x, y);
3108 /* See if we can coerce the target into moving both values at once. */
3110 /* Move floating point as parts. */
3111 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3112 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3114 /* Not possible if the values are inherently not adjacent. */
3115 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3117 /* Is possible if both are registers (or subregs of registers). */
3118 else if (register_operand (x, mode) && register_operand (y, mode))
3120 /* If one of the operands is a memory, and alignment constraints
3121 are friendly enough, we may be able to do combined memory operations.
3122 We do not attempt this if Y is a constant because that combination is
3123 usually better with the by-parts thing below. */
3124 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3125 && (!STRICT_ALIGNMENT
3126 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3135 /* For memory to memory moves, optimal behavior can be had with the
3136 existing block move logic. */
3137 if (MEM_P (x) && MEM_P (y))
3139 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3140 BLOCK_OP_NO_LIBCALL);
3141 return get_last_insn ();
3144 ret = emit_move_via_integer (mode, x, y, true);
3149 return emit_move_complex_parts (x, y);
3152 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3153 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3156 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3160 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3163 enum insn_code code = optab_handler (mov_optab, CCmode);
3164 if (code != CODE_FOR_nothing)
3166 x = emit_move_change_mode (CCmode, mode, x, true);
3167 y = emit_move_change_mode (CCmode, mode, y, true);
3168 return emit_insn (GEN_FCN (code) (x, y));
3172 /* Otherwise, find the MODE_INT mode of the same width. */
3173 ret = emit_move_via_integer (mode, x, y, false);
3174 gcc_assert (ret != NULL);
3178 /* Return true if word I of OP lies entirely in the
3179 undefined bits of a paradoxical subreg. */
3182 undefined_operand_subword_p (const_rtx op, int i)
3184 enum machine_mode innermode, innermostmode;
3186 if (GET_CODE (op) != SUBREG)
3188 innermode = GET_MODE (op);
3189 innermostmode = GET_MODE (SUBREG_REG (op));
3190 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3191 /* The SUBREG_BYTE represents offset, as if the value were stored in
3192 memory, except for a paradoxical subreg where we define
3193 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg.  */
3195 if (SUBREG_BYTE (op) == 0
3196 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3198 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3199 if (WORDS_BIG_ENDIAN)
3200 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3201 if (BYTES_BIG_ENDIAN)
3202 offset += difference % UNITS_PER_WORD;
3204 if (offset >= GET_MODE_SIZE (innermostmode)
3205 || offset <= -GET_MODE_SIZE (word_mode))
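/* Worked example: for (subreg:DI (reg:SI x) 0) on a 32-bit
   little-endian target, word 0 maps to OFFSET == 0 and overlaps the
   SImode value, while word 1 maps to OFFSET == 4, which is at or
   beyond GET_MODE_SIZE (SImode) == 4; word 1 therefore lies entirely
   in the undefined bits and emit_move_multi_word can skip it.  */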
3210 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3211 MODE is any multi-word or full-word mode that lacks a move_insn
3212 pattern. Note that you will get better code if you define such
3213 patterns, even if they must turn into multiple assembler instructions. */
3216 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3223 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3225 /* If X is a push on the stack, do the push now and replace
3226 X with a reference to the stack pointer. */
3227 if (push_operand (x, mode))
3228 x = emit_move_resolve_push (mode, x);
3230 /* If we are in reload, see if either operand is a MEM whose address
3231 is scheduled for replacement. */
3232 if (reload_in_progress && MEM_P (x)
3233 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3234 x = replace_equiv_address_nv (x, inner);
3235 if (reload_in_progress && MEM_P (y)
3236 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3237 y = replace_equiv_address_nv (y, inner);
3241 need_clobber = false;
3243 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3246 rtx xpart = operand_subword (x, i, 1, mode);
3249 /* Do not generate code for a move if it would come entirely
3250 from the undefined bits of a paradoxical subreg. */
3251 if (undefined_operand_subword_p (y, i))
3254 ypart = operand_subword (y, i, 1, mode);
3256 /* If we can't get a part of Y, put Y into memory if it is a
3257 constant. Otherwise, force it into a register. Then we must
3258 be able to get a part of Y. */
3259 if (ypart == 0 && CONSTANT_P (y))
3261 y = use_anchored_address (force_const_mem (mode, y));
3262 ypart = operand_subword (y, i, 1, mode);
3264 else if (ypart == 0)
3265 ypart = operand_subword_force (y, i, mode);
3267 gcc_assert (xpart && ypart);
3269 need_clobber |= (GET_CODE (xpart) == SUBREG);
3271 last_insn = emit_move_insn (xpart, ypart);
3277 /* Show the output dies here. This is necessary for SUBREGs
3278 of pseudos since we cannot track their lifetimes correctly;
3279 hard regs shouldn't appear here except as return values.
3280 We never want to emit such a clobber after reload. */
3282 && ! (reload_in_progress || reload_completed)
3283 && need_clobber != 0)
3291 /* Low level part of emit_move_insn.
3292 Called just like emit_move_insn, but assumes X and Y
3293 are basically valid. */
3296 emit_move_insn_1 (rtx x, rtx y)
3298 enum machine_mode mode = GET_MODE (x);
3299 enum insn_code code;
3301 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3303 code = optab_handler (mov_optab, mode);
3304 if (code != CODE_FOR_nothing)
3305 return emit_insn (GEN_FCN (code) (x, y));
3307 /* Expand complex moves by moving real part and imag part. */
3308 if (COMPLEX_MODE_P (mode))
3309 return emit_move_complex (mode, x, y);
3311 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3312 || ALL_FIXED_POINT_MODE_P (mode))
3314 rtx result = emit_move_via_integer (mode, x, y, true);
3316 /* If we can't find an integer mode, fall back to multiple word-sized moves.  */
3320 return emit_move_multi_word (mode, x, y);
3323 if (GET_MODE_CLASS (mode) == MODE_CC)
3324 return emit_move_ccmode (mode, x, y);
3326 /* Try using a move pattern for the corresponding integer mode. This is
3327 only safe when simplify_subreg can convert MODE constants into integer
3328 constants. At present, it can only do this reliably if the value
3329 fits within a HOST_WIDE_INT. */
3330 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3332 rtx ret = emit_move_via_integer (mode, x, y, false);
3337 return emit_move_multi_word (mode, x, y);
3340 /* Generate code to copy Y into X.
3341 Both Y and X must have the same mode, except that
3342 Y can be a constant with VOIDmode.
3343 This mode cannot be BLKmode; use emit_block_move for that.
3345 Return the last instruction emitted. */
3348 emit_move_insn (rtx x, rtx y)
3350 enum machine_mode mode = GET_MODE (x);
3351 rtx y_cst = NULL_RTX;
3354 gcc_assert (mode != BLKmode
3355 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3360 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3361 && (last_insn = compress_float_constant (x, y)))
3366 if (!LEGITIMATE_CONSTANT_P (y))
3368 y = force_const_mem (mode, y);
3370 /* If the target's cannot_force_const_mem prevented the spill,
3371 assume that the target's move expanders will also take care
3372 of the non-legitimate constant. */
3376 y = use_anchored_address (y);
3380 /* If X or Y are memory references, verify that their addresses are valid for the machine.  */
3383 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3385 && ! push_operand (x, GET_MODE (x))))
3386 x = validize_mem (x);
3389 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3390 MEM_ADDR_SPACE (y)))
3391 y = validize_mem (y);
3393 gcc_assert (mode != BLKmode);
3395 last_insn = emit_move_insn_1 (x, y);
3397 if (y_cst && REG_P (x)
3398 && (set = single_set (last_insn)) != NULL_RTX
3399 && SET_DEST (set) == x
3400 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3401 set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3406 /* If Y is representable exactly in a narrower mode, and the target can
3407 perform the extension directly from constant or memory, then emit the
3408 move as an extension. */
3411 compress_float_constant (rtx x, rtx y)
3413 enum machine_mode dstmode = GET_MODE (x);
3414 enum machine_mode orig_srcmode = GET_MODE (y);
3415 enum machine_mode srcmode;
3417 int oldcost, newcost;
3418 bool speed = optimize_insn_for_speed_p ();
3420 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3422 if (LEGITIMATE_CONSTANT_P (y))
3423 oldcost = rtx_cost (y, SET, speed);
3425 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3427 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3428 srcmode != orig_srcmode;
3429 srcmode = GET_MODE_WIDER_MODE (srcmode))
3432 rtx trunc_y, last_insn;
3434 /* Skip if the target can't extend this way. */
3435 ic = can_extend_p (dstmode, srcmode, 0);
3436 if (ic == CODE_FOR_nothing)
3439 /* Skip if the narrowed value isn't exact. */
3440 if (! exact_real_truncate (srcmode, &r))
3443 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3445 if (LEGITIMATE_CONSTANT_P (trunc_y))
3447 /* Skip if the target needs extra instructions to perform the extension.  */
3449 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3451 /* This is valid, but may not be cheaper than the original. */
3452 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3453 if (oldcost < newcost)
3456 else if (float_extend_from_mem[dstmode][srcmode])
3458 trunc_y = force_const_mem (srcmode, trunc_y);
3459 /* This is valid, but may not be cheaper than the original. */
3460 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3461 if (oldcost < newcost)
3463 trunc_y = validize_mem (trunc_y);
3468 /* For CSE's benefit, force the compressed constant pool entry
3469 into a new pseudo. This constant may be used in different modes,
3470 and if not, combine will put things back together for us. */
3471 trunc_y = force_reg (srcmode, trunc_y);
3472 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3473 last_insn = get_last_insn ();
3476 set_unique_reg_note (last_insn, REG_EQUAL, y);
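/* Worked example: moving the DFmode constant 1.0 into a register.
   exact_real_truncate shows 1.0 is exactly representable in SFmode,
   so on a target with an extendsfdf2 pattern the move can become
   (set (reg:DF d) (float_extend:DF (mem:SF pool))) with only a
   4-byte constant-pool entry, or use the SFmode constant directly if
   it is legitimate, provided rtx_cost does not rate the original
   DFmode form as cheaper.  */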
3484 /* Pushing data onto the stack. */
3486 /* Push a block of length SIZE (perhaps variable)
3487 and return an rtx to address the beginning of the block.
3488 The value may be virtual_outgoing_args_rtx.
3490 EXTRA is the number of bytes of padding to push in addition to SIZE.
3491 BELOW nonzero means this padding comes at low addresses;
3492 otherwise, the padding comes at high addresses. */
3495 push_block (rtx size, int extra, int below)
3499 size = convert_modes (Pmode, ptr_mode, size, 1);
3500 if (CONSTANT_P (size))
3501 anti_adjust_stack (plus_constant (size, extra));
3502 else if (REG_P (size) && extra == 0)
3503 anti_adjust_stack (size);
3506 temp = copy_to_mode_reg (Pmode, size);
3508 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3509 temp, 0, OPTAB_LIB_WIDEN);
3510 anti_adjust_stack (temp);
3513 #ifndef STACK_GROWS_DOWNWARD
3519 temp = virtual_outgoing_args_rtx;
3520 if (extra != 0 && below)
3521 temp = plus_constant (temp, extra);
3525 if (CONST_INT_P (size))
3526 temp = plus_constant (virtual_outgoing_args_rtx,
3527 -INTVAL (size) - (below ? 0 : extra));
3528 else if (extra != 0 && !below)
3529 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3530 negate_rtx (Pmode, plus_constant (size, extra)));
3532 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3533 negate_rtx (Pmode, size));
3536 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3539 #ifdef PUSH_ROUNDING
3541 /* Emit single push insn. */
3544 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3547 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3549 enum insn_code icode;
3550 insn_operand_predicate_fn pred;
3552 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3553 /* If there is a push pattern, use it.  Otherwise try the old way of throwing
3554 a MEM representing the push operation to the move expander.  */
3555 icode = optab_handler (push_optab, mode);
3556 if (icode != CODE_FOR_nothing)
3558 if (((pred = insn_data[(int) icode].operand[0].predicate)
3559 && !((*pred) (x, mode))))
3560 x = force_reg (mode, x);
3561 emit_insn (GEN_FCN (icode) (x));
3564 if (GET_MODE_SIZE (mode) == rounded_size)
3565 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3566 /* If we are to pad downward, adjust the stack pointer first and
3567 then store X into the stack location using an offset. This is
3568 because emit_move_insn does not know how to pad; it does not have the information.  */
3570 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3572 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3573 HOST_WIDE_INT offset;
3575 emit_move_insn (stack_pointer_rtx,
3576 expand_binop (Pmode,
3577 #ifdef STACK_GROWS_DOWNWARD
3583 GEN_INT (rounded_size),
3584 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3586 offset = (HOST_WIDE_INT) padding_size;
3587 #ifdef STACK_GROWS_DOWNWARD
3588 if (STACK_PUSH_CODE == POST_DEC)
3589 /* We have already decremented the stack pointer, so get the previous value.  */
3591 offset += (HOST_WIDE_INT) rounded_size;
3593 if (STACK_PUSH_CODE == POST_INC)
3594 /* We have already incremented the stack pointer, so get the previous value.  */
3596 offset -= (HOST_WIDE_INT) rounded_size;
3598 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3602 #ifdef STACK_GROWS_DOWNWARD
3603 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3604 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3605 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3607 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3608 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3609 GEN_INT (rounded_size));
3611 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3614 dest = gen_rtx_MEM (mode, dest_addr);
3618 set_mem_attributes (dest, type, 1);
3620 if (flag_optimize_sibling_calls)
3621 /* Function incoming arguments may overlap with sibling call
3622 outgoing arguments and we cannot allow reordering of reads
3623 from function arguments with stores to outgoing arguments
3624 of sibling calls. */
3625 set_mem_alias_set (dest, 0);
3627 emit_move_insn (dest, x);
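/* Worked example for the downward-padding path above: pushing an
   HImode value when PUSH_ROUNDING rounds 2 bytes up to 4 on a
   downward-growing stack.  ROUNDED_SIZE == 4 and PADDING_SIZE == 2,
   so the stack pointer is dropped by 4 first and the value is stored
   at SP + 2, leaving the padding bytes at the low end of the slot.
   With STACK_PUSH_CODE == POST_DEC the pointer has conceptually
   already moved past the slot, hence OFFSET is bumped by
   ROUNDED_SIZE to address the previous value.  */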
3631 /* Generate code to push X onto the stack, assuming it has mode MODE and
3633 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3635 SIZE is an rtx for the size of data to be copied (in bytes),
3636 needed only if X is BLKmode.
3638 ALIGN (in bits) is maximum alignment we can assume.
3640 If PARTIAL and REG are both nonzero, then copy that many of the first
3641 bytes of X into registers starting with REG, and push the rest of X.
3642 The amount of space pushed is decreased by PARTIAL bytes.
3643 REG must be a hard register in this case.
3644 If REG is zero but PARTIAL is not, take all other actions for an
3645 argument partially in registers, but do not actually load any registers.
3648 EXTRA is the amount in bytes of extra space to leave next to this arg.
3649 This is ignored if an argument block has already been allocated.
3651 On a machine that lacks real push insns, ARGS_ADDR is the address of
3652 the bottom of the argument block for this call. We use indexing off there
3653 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3654 argument block has not been preallocated.
3656 ARGS_SO_FAR is the size of args previously pushed for this call.
3658 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3659 for arguments passed in registers. If nonzero, it will be the number
3660 of bytes required. */
3663 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3664 unsigned int align, int partial, rtx reg, int extra,
3665 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3669 enum direction stack_direction
3670 #ifdef STACK_GROWS_DOWNWARD
3676 /* Decide where to pad the argument: `downward' for below,
3677 `upward' for above, or `none' for don't pad it.
3678 Default is below for small data on big-endian machines; else above. */
3679 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3681 /* Invert direction if the stack is post-decrement.  */
3683 if (STACK_PUSH_CODE == POST_DEC)
3684 if (where_pad != none)
3685 where_pad = (where_pad == downward ? upward : downward);
3690 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3692 /* Copy a block into the stack, entirely or partially. */
3699 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3700 used = partial - offset;
3702 if (mode != BLKmode)
3704 /* A value is to be stored in an insufficiently aligned
3705 stack slot; copy via a suitably aligned slot if necessary.  */
3707 size = GEN_INT (GET_MODE_SIZE (mode));
3708 if (!MEM_P (xinner))
3710 temp = assign_temp (type, 0, 1, 1);
3711 emit_move_insn (temp, xinner);
3718 /* USED is now the # of bytes we need not copy to the stack
3719 because registers will take care of them. */
3722 xinner = adjust_address (xinner, BLKmode, used);
3724 /* If the partial register-part of the arg counts in its stack size,
3725 skip the part of stack space corresponding to the registers.
3726 Otherwise, start copying to the beginning of the stack space,
3727 by setting SKIP to 0. */
3728 skip = (reg_parm_stack_space == 0) ? 0 : used;
3730 #ifdef PUSH_ROUNDING
3731 /* Do it with several push insns if that doesn't take lots of insns
3732 and if there is no difficulty with push insns that skip bytes
3733 on the stack for alignment purposes. */
3736 && CONST_INT_P (size)
3738 && MEM_ALIGN (xinner) >= align
3739 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3740 /* Here we avoid the case of a structure whose weak alignment
3741 forces many pushes of a small amount of data,
3742 and such small pushes do rounding that causes trouble. */
3743 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3744 || align >= BIGGEST_ALIGNMENT
3745 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3746 == (align / BITS_PER_UNIT)))
3747 && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3749 /* Push padding now if padding above and stack grows down,
3750 or if padding below and stack grows up.
3751 But if space already allocated, this has already been done. */
3752 if (extra && args_addr == 0
3753 && where_pad != none && where_pad != stack_direction)
3754 anti_adjust_stack (GEN_INT (extra));
3756 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3759 #endif /* PUSH_ROUNDING */
3763 /* Otherwise make space on the stack and copy the data
3764 to the address of that space. */
3766 /* Deduct words put into registers from the size we must copy. */
3769 if (CONST_INT_P (size))
3770 size = GEN_INT (INTVAL (size) - used);
3772 size = expand_binop (GET_MODE (size), sub_optab, size,
3773 GEN_INT (used), NULL_RTX, 0,
3777 /* Get the address of the stack space.
3778 In this case, we do not deal with EXTRA separately.
3779 A single stack adjust will do. */
3782 temp = push_block (size, extra, where_pad == downward);
3785 else if (CONST_INT_P (args_so_far))
3786 temp = memory_address (BLKmode,
3787 plus_constant (args_addr,
3788 skip + INTVAL (args_so_far)));
3790 temp = memory_address (BLKmode,
3791 plus_constant (gen_rtx_PLUS (Pmode,
3796 if (!ACCUMULATE_OUTGOING_ARGS)
3798 /* If the source is referenced relative to the stack pointer,
3799 copy it to another register to stabilize it. We do not need
3800 to do this if we know that we won't be changing sp. */
3802 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3803 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3804 temp = copy_to_reg (temp);
3807 target = gen_rtx_MEM (BLKmode, temp);
3809 /* We do *not* set_mem_attributes here, because incoming arguments
3810 may overlap with sibling call outgoing arguments and we cannot
3811 allow reordering of reads from function arguments with stores
3812 to outgoing arguments of sibling calls. We do, however, want
3813 to record the alignment of the stack slot. */
3814 /* ALIGN may well be better aligned than TYPE, e.g. due to
3815 PARM_BOUNDARY. Assume the caller isn't lying. */
3816 set_mem_align (target, align);
3818 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3821 else if (partial > 0)
3823 /* Scalar partly in registers. */
3825 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3828 /* # bytes of start of argument
3829 that we must make space for but need not store. */
3830 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3831 int args_offset = INTVAL (args_so_far);
3834 /* Push padding now if padding above and stack grows down,
3835 or if padding below and stack grows up.
3836 But if space already allocated, this has already been done. */
3837 if (extra && args_addr == 0
3838 && where_pad != none && where_pad != stack_direction)
3839 anti_adjust_stack (GEN_INT (extra));
3841 /* If we make space by pushing it, we might as well push
3842 the real data. Otherwise, we can leave OFFSET nonzero
3843 and leave the space uninitialized. */
3847 /* Now NOT_STACK gets the number of words that we don't need to
3848 allocate on the stack. Convert OFFSET to words too. */
3849 not_stack = (partial - offset) / UNITS_PER_WORD;
3850 offset /= UNITS_PER_WORD;
3852 /* If the partial register-part of the arg counts in its stack size,
3853 skip the part of stack space corresponding to the registers.
3854 Otherwise, start copying to the beginning of the stack space,
3855 by setting SKIP to 0. */
3856 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3858 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3859 x = validize_mem (force_const_mem (mode, x));
3861 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3862 SUBREGs of such registers are not allowed. */
3863 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3864 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3865 x = copy_to_reg (x);
3867 /* Loop over all the words allocated on the stack for this arg. */
3868 /* We can do it by words, because any scalar bigger than a word
3869 has a size a multiple of a word. */
3870 #ifndef PUSH_ARGS_REVERSED
3871 for (i = not_stack; i < size; i++)
3873 for (i = size - 1; i >= not_stack; i--)
3875 if (i >= not_stack + offset)
3876 emit_push_insn (operand_subword_force (x, i, mode),
3877 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3879 GEN_INT (args_offset + ((i - not_stack + skip)
3881 reg_parm_stack_space, alignment_pad);
3888 /* Push padding now if padding above and stack grows down,
3889 or if padding below and stack grows up.
3890 But if space already allocated, this has already been done. */
3891 if (extra && args_addr == 0
3892 && where_pad != none && where_pad != stack_direction)
3893 anti_adjust_stack (GEN_INT (extra));
3895 #ifdef PUSH_ROUNDING
3896 if (args_addr == 0 && PUSH_ARGS)
3897 emit_single_push_insn (mode, x, type);
3901 if (CONST_INT_P (args_so_far))
3903 = memory_address (mode,
3904 plus_constant (args_addr,
3905 INTVAL (args_so_far)));
3907 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3909 dest = gen_rtx_MEM (mode, addr);
3911 /* We do *not* set_mem_attributes here, because incoming arguments
3912 may overlap with sibling call outgoing arguments and we cannot
3913 allow reordering of reads from function arguments with stores
3914 to outgoing arguments of sibling calls. We do, however, want
3915 to record the alignment of the stack slot. */
3916 /* ALIGN may well be better aligned than TYPE, e.g. due to
3917 PARM_BOUNDARY. Assume the caller isn't lying. */
3918 set_mem_align (dest, align);
3920 emit_move_insn (dest, x);
3924 /* If part should go in registers, copy that part
3925 into the appropriate registers. Do this now, at the end,
3926 since mem-to-mem copies above may do function calls. */
3927 if (partial > 0 && reg != 0)
3929 /* Handle calls that pass values in multiple non-contiguous locations.
3930 The Irix 6 ABI has examples of this. */
3931 if (GET_CODE (reg) == PARALLEL)
3932 emit_group_load (reg, x, type, -1);
3935 gcc_assert (partial % UNITS_PER_WORD == 0);
3936 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3940 if (extra && args_addr == 0 && where_pad == stack_direction)
3941 anti_adjust_stack (GEN_INT (extra));
3943 if (alignment_pad && args_addr == 0)
3944 anti_adjust_stack (alignment_pad);
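/* Worked example for the "scalar partly in registers" path: a
   12-byte scalar (three 4-byte words) with PARTIAL == 8 and
   PARM_BOUNDARY == 32 gives OFFSET == 0 and NOT_STACK == 2, so only
   word 2 is pushed; words 0 and 1 are loaded into registers by the
   move_block_to_reg/emit_group_load code at the end.  If
   REG_PARM_STACK_SPACE is nonzero, SKIP == NOT_STACK and the pushed
   word is additionally displaced past the stack space reserved for
   the register part.  */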
3947 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations.  */
3951 get_subtarget (rtx x)
3955 /* Only registers can be subtargets. */
3957 /* Don't use hard regs to avoid extending their life. */
3958 || REGNO (x) < FIRST_PSEUDO_REGISTER
3962 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3963 FIELD is a bitfield. Returns true if the optimization was successful,
3964 and there's nothing else to do. */
3967 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3968 unsigned HOST_WIDE_INT bitpos,
3969 enum machine_mode mode1, rtx str_rtx,
3972 enum machine_mode str_mode = GET_MODE (str_rtx);
3973 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3978 if (mode1 != VOIDmode
3979 || bitsize >= BITS_PER_WORD
3980 || str_bitsize > BITS_PER_WORD
3981 || TREE_SIDE_EFFECTS (to)
3982 || TREE_THIS_VOLATILE (to))
3986 if (!BINARY_CLASS_P (src)
3987 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3990 op0 = TREE_OPERAND (src, 0);
3991 op1 = TREE_OPERAND (src, 1);
3994 if (!operand_equal_p (to, op0, 0))
3997 if (MEM_P (str_rtx))
3999 unsigned HOST_WIDE_INT offset1;
4001 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4002 str_mode = word_mode;
4003 str_mode = get_best_mode (bitsize, bitpos,
4004 MEM_ALIGN (str_rtx), str_mode, 0);
4005 if (str_mode == VOIDmode)
4007 str_bitsize = GET_MODE_BITSIZE (str_mode);
4010 bitpos %= str_bitsize;
4011 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4012 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4014 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4017 /* If the bit field covers the whole REG/MEM, store_field
4018 will likely generate better code. */
4019 if (bitsize >= str_bitsize)
4022 /* We can't handle fields split across multiple entities. */
4023 if (bitpos + bitsize > str_bitsize)
4026 if (BYTES_BIG_ENDIAN)
4027 bitpos = str_bitsize - bitpos - bitsize;
4029 switch (TREE_CODE (src))
4033 /* For now, just optimize the case of the topmost bitfield,
4034 where we don't need to do any masking, and also
4035 1-bit bitfields, where xor can be used.
4036 We might win by one instruction for the other bitfields
4037 too if insv/extv instructions aren't used; that
4038 can be added later. */
4039 if (bitpos + bitsize != str_bitsize
4040 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4043 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4044 value = convert_modes (str_mode,
4045 TYPE_MODE (TREE_TYPE (op1)), value,
4046 TYPE_UNSIGNED (TREE_TYPE (op1)));
4048 /* We may be accessing data outside the field, which means
4049 we can alias adjacent data. */
4050 if (MEM_P (str_rtx))
4052 str_rtx = shallow_copy_rtx (str_rtx);
4053 set_mem_alias_set (str_rtx, 0);
4054 set_mem_expr (str_rtx, 0);
4057 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4058 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4060 value = expand_and (str_mode, value, const1_rtx, NULL);
4063 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4064 build_int_cst (NULL_TREE, bitpos),
4066 result = expand_binop (str_mode, binop, str_rtx,
4067 value, str_rtx, 1, OPTAB_WIDEN);
4068 if (result != str_rtx)
4069 emit_move_insn (str_rtx, result);
4074 if (TREE_CODE (op1) != INTEGER_CST)
4076 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4077 value = convert_modes (GET_MODE (str_rtx),
4078 TYPE_MODE (TREE_TYPE (op1)), value,
4079 TYPE_UNSIGNED (TREE_TYPE (op1)));
4081 /* We may be accessing data outside the field, which means
4082 we can alias adjacent data. */
4083 if (MEM_P (str_rtx))
4085 str_rtx = shallow_copy_rtx (str_rtx);
4086 set_mem_alias_set (str_rtx, 0);
4087 set_mem_expr (str_rtx, 0);
4090 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4091 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4093 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4095 value = expand_and (GET_MODE (str_rtx), value, mask,
4098 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4099 build_int_cst (NULL_TREE, bitpos),
4101 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4102 value, str_rtx, 1, OPTAB_WIDEN);
4103 if (result != str_rtx)
4104 emit_move_insn (str_rtx, result);
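/* Illustration: for a hypothetical little-endian layout

       struct { unsigned a : 4; unsigned f : 3; unsigned rest : 9; } *p;
       p->f |= 5;

   BITPOS == 4 and BITSIZE == 3 in a 16-bit container, and the
   BIT_IOR_EXPR case above expands to roughly

       tmp  = *(uint16_t *) p;
       tmp |= (5 & 7) << 4;
       *(uint16_t *) p = tmp;

   a single read-modify-write in STR_MODE.  The masking step is
   skipped when the field reaches the container's most significant
   bit, and 1-bit PLUS/MINUS fields are rewritten to use xor.  */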
4115 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4116 is true, try generating a nontemporal store. */
4119 expand_assignment (tree to, tree from, bool nontemporal)
4123 enum machine_mode mode;
4126 /* Don't crash if the lhs of the assignment was erroneous. */
4127 if (TREE_CODE (to) == ERROR_MARK)
4129 result = expand_normal (from);
4133 /* Optimize away no-op moves without side-effects. */
4134 if (operand_equal_p (to, from, 0))
4137 mode = TYPE_MODE (TREE_TYPE (to));
4138 if ((TREE_CODE (to) == MEM_REF
4139 || TREE_CODE (to) == TARGET_MEM_REF)
4141 && ((align = MAX (TYPE_ALIGN (TREE_TYPE (to)),
4142 get_object_alignment (to, BIGGEST_ALIGNMENT)))
4143 < (signed) GET_MODE_ALIGNMENT (mode))
4144 && ((icode = optab_handler (movmisalign_optab, mode))
4145 != CODE_FOR_nothing))
4147 enum machine_mode address_mode, op_mode1;
4148 rtx insn, reg, op0, mem;
4150 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4151 reg = force_not_mem (reg);
4153 if (TREE_CODE (to) == MEM_REF)
4156 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 1))));
4157 tree base = TREE_OPERAND (to, 0);
4158 address_mode = targetm.addr_space.address_mode (as);
4159 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4160 op0 = convert_memory_address_addr_space (address_mode, op0, as);
4161 if (!integer_zerop (TREE_OPERAND (to, 1)))
4164 = immed_double_int_const (mem_ref_offset (to), address_mode);
4165 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
4167 op0 = memory_address_addr_space (mode, op0, as);
4168 mem = gen_rtx_MEM (mode, op0);
4169 set_mem_attributes (mem, to, 0);
4170 set_mem_addr_space (mem, as);
4172 else if (TREE_CODE (to) == TARGET_MEM_REF)
4174 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (to));
4175 struct mem_address addr;
4177 get_address_description (to, &addr);
4178 op0 = addr_for_mem_ref (&addr, as, true);
4179 op0 = memory_address_addr_space (mode, op0, as);
4180 mem = gen_rtx_MEM (mode, op0);
4181 set_mem_attributes (mem, to, 0);
4182 set_mem_addr_space (mem, as);
4186 if (TREE_THIS_VOLATILE (to))
4187 MEM_VOLATILE_P (mem) = 1;
4189 op_mode1 = insn_data[icode].operand[1].mode;
4190 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4191 && op_mode1 != VOIDmode)
4192 reg = copy_to_mode_reg (op_mode1, reg);
4194 insn = GEN_FCN (icode) (mem, reg);
4195 /* The movmisalign<mode> pattern cannot fail, else the assignment would
4196 silently be omitted. */
4197 gcc_assert (insn != NULL_RTX);
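/* Editorial illustration (assumes a target that provides a
   movmisalign<mode> pattern, e.g. unaligned vector stores): the path
   above handles stores through under-aligned pointer types, such as

     typedef int v4si __attribute__ ((vector_size (16)));
     typedef v4si unaligned_v4si __attribute__ ((aligned (4)));
     void store (unaligned_v4si *p, v4si x) { *p = x; }

   where the MEM's alignment is below GET_MODE_ALIGNMENT of the vector
   mode, so a plain move insn cannot be used. */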
4202 /* Assignment of a structure component needs special treatment
4203 if the structure component's rtx is not simply a MEM.
4204 Assignment of an array element at a constant index, and assignment of
4205 an array element in an unaligned packed structure field, has the same problem. */
4207 if (handled_component_p (to)
4208 /* ??? We only need to handle MEM_REF here if the access is not
4209 a full access of the base object. */
4210 || (TREE_CODE (to) == MEM_REF
4211 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4212 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4214 enum machine_mode mode1;
4215 HOST_WIDE_INT bitsize, bitpos;
4222 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4223 &unsignedp, &volatilep, true);
4225 /* If we are going to use store_bit_field and extract_bit_field,
4226 make sure to_rtx will be safe for multiple use. */
4228 to_rtx = expand_normal (tem);
4230 /* If the bitfield is volatile, we want to access it in the
4231 field's mode, not the computed mode.
4232 If a MEM has VOIDmode (external with incomplete type),
4233 use BLKmode for it instead. */
4236 if (volatilep && flag_strict_volatile_bitfields > 0)
4237 to_rtx = adjust_address (to_rtx, mode1, 0);
4238 else if (GET_MODE (to_rtx) == VOIDmode)
4239 to_rtx = adjust_address (to_rtx, BLKmode, 0);
4244 enum machine_mode address_mode;
4247 if (!MEM_P (to_rtx))
4249 /* We can get constant negative offsets into arrays with broken
4250 user code. Translate this to a trap instead of ICEing. */
4251 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4252 expand_builtin_trap ();
4253 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
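/* Editorial illustration: the trap above is reached for invalid source
   that indexes before the start of an object which does not live in
   memory, e.g.

     void f (void) { int v[1]; v[-1] = 0; }

   (undefined behavior). Expanding such a store to a trap is
   preferable to an ICE. */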
4256 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4258 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4259 if (GET_MODE (offset_rtx) != address_mode)
4260 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4262 /* A constant address in TO_RTX can have VOIDmode; we must not try
4263 to call force_reg in that case, so avoid it. */
4265 && GET_MODE (to_rtx) == BLKmode
4266 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4268 && (bitpos % bitsize) == 0
4269 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4270 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4272 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4276 to_rtx = offset_address (to_rtx, offset_rtx,
4277 highest_pow2_factor_for_target (to,
4281 /* No action is needed if the target is not a memory and the field
4282 lies completely outside that target. This can occur if the source
4283 code contains an out-of-bounds access to a small array. */
4285 && GET_MODE (to_rtx) != BLKmode
4286 && (unsigned HOST_WIDE_INT) bitpos
4287 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4289 expand_normal (from);
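/* Editorial illustration: for a small object promoted to a register,
   an out-of-bounds store such as

     struct tiny { char c[2]; };
     void f (struct tiny t) { t.c[5] = 1; }

   can place the field entirely outside the register holding T; FROM
   is still expanded for any side effects, but no store is emitted. */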
4292 /* Handle expand_expr of a complex value returning a CONCAT. */
4293 else if (GET_CODE (to_rtx) == CONCAT)
4295 unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4296 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4298 && bitsize == mode_bitsize)
4299 result = store_expr (from, to_rtx, false, nontemporal);
4300 else if (bitsize == mode_bitsize / 2
4301 && (bitpos == 0 || bitpos == mode_bitsize / 2))
4302 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4304 else if (bitpos + bitsize <= mode_bitsize / 2)
4305 result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4306 mode1, from, TREE_TYPE (tem),
4307 get_alias_set (to), nontemporal);
4308 else if (bitpos >= mode_bitsize / 2)
4309 result = store_field (XEXP (to_rtx, 1), bitsize,
4310 bitpos - mode_bitsize / 2, mode1, from,
4311 TREE_TYPE (tem), get_alias_set (to),
4313 else if (bitpos == 0 && bitsize == mode_bitsize)
4316 result = expand_normal (from);
4317 from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4318 TYPE_MODE (TREE_TYPE (from)), 0);
4319 emit_move_insn (XEXP (to_rtx, 0),
4320 read_complex_part (from_rtx, false));
4321 emit_move_insn (XEXP (to_rtx, 1),
4322 read_complex_part (from_rtx, true));
4326 rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4327 GET_MODE_SIZE (GET_MODE (to_rtx)),
4329 write_complex_part (temp, XEXP (to_rtx, 0), false);
4330 write_complex_part (temp, XEXP (to_rtx, 1), true);
4331 result = store_field (temp, bitsize, bitpos, mode1, from,
4332 TREE_TYPE (tem), get_alias_set (to),
4334 emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4335 emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
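/* Editorial illustration: the CONCAT cases above handle stores into a
   complex value kept as a pair of registers, e.g.

     double f (double d)
     {
       _Complex double c = 0;
       __real__ c = d;        <- stores only XEXP (to_rtx, 0)
       return __imag__ c;
     }

   while a store that straddles both halves without covering the whole
   value goes through the stack temporary built just above. */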
4342 /* If the field is at offset zero, we could have been given the
4343 DECL_RTX of the parent struct. Don't munge it. */
4344 to_rtx = shallow_copy_rtx (to_rtx);
4346 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4348 /* Deal with volatile and readonly fields. The former is only
4349 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4351 MEM_VOLATILE_P (to_rtx) = 1;
4352 if (component_uses_parent_alias_set (to))
4353 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4356 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4360 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4361 TREE_TYPE (tem), get_alias_set (to),
4366 preserve_temp_slots (result);
4372 /* If the rhs is a function call and its value is not an aggregate,
4373 call the function before we start to compute the lhs.
4374 This is needed for correct code for cases such as
4375 val = setjmp (buf) on machines where reference to val
4376 requires loading up part of an address in a separate insn.
4378 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG
4379 since it might be a promoted variable where the zero- or sign-extension
4380 needs to be done. Handling this in the normal way is safe because no
4381 computation is done before the call. The same is true for SSA names. */
4382 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4383 && COMPLETE_TYPE_P (TREE_TYPE (from))
4384 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4385 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4386 && REG_P (DECL_RTL (to)))
4387 || TREE_CODE (to) == SSA_NAME))
4392 value = expand_normal (from);
4394 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4396 /* Handle calls that return values in multiple non-contiguous locations.
4397 The Irix 6 ABI has examples of this. */
4398 if (GET_CODE (to_rtx) == PARALLEL)
4399 emit_group_load (to_rtx, value, TREE_TYPE (from),
4400 int_size_in_bytes (TREE_TYPE (from)));
4401 else if (GET_MODE (to_rtx) == BLKmode)
4402 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4405 if (POINTER_TYPE_P (TREE_TYPE (to)))
4406 value = convert_memory_address_addr_space
4407 (GET_MODE (to_rtx), value,
4408 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4410 emit_move_insn (to_rtx, value);
4412 preserve_temp_slots (to_rtx);
4418 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4419 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4422 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4424 /* Don't move directly into a return register. */
4425 if (TREE_CODE (to) == RESULT_DECL
4426 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4431 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4433 if (GET_CODE (to_rtx) == PARALLEL)
4434 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4435 int_size_in_bytes (TREE_TYPE (from)));
4437 emit_move_insn (to_rtx, temp);
4439 preserve_temp_slots (to_rtx);
4445 /* In case we are returning the contents of an object which overlaps
4446 the place the value is being stored, use a safe function when copying
4447 a value through a pointer into a structure value return block. */
4448 if (TREE_CODE (to) == RESULT_DECL
4449 && TREE_CODE (from) == INDIRECT_REF
4450 && ADDR_SPACE_GENERIC_P
4451 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4452 && refs_may_alias_p (to, from)
4453 && cfun->returns_struct
4454 && !cfun->returns_pcc_struct)
4459 size = expr_size (from);
4460 from_rtx = expand_normal (from);
4462 emit_library_call (memmove_libfunc, LCT_NORMAL,
4463 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4464 XEXP (from_rtx, 0), Pmode,
4465 convert_to_mode (TYPE_MODE (sizetype),
4466 size, TYPE_UNSIGNED (sizetype)),
4467 TYPE_MODE (sizetype));
4469 preserve_temp_slots (to_rtx);
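/* Editorial illustration: the memmove call above guards returns like

     struct big { int a[64]; };
     struct big f (struct big *p) { return *p; }

   where P may point into F's own return-value block, so the source
   and destination of the copy can overlap and a plain block move
   would be unsafe. */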
4475 /* Compute FROM and store the value in the rtx we got. */
4478 result = store_expr (from, to_rtx, 0, nontemporal);
4479 preserve_temp_slots (result);
4485 /* Emits a nontemporal store insn that moves FROM to TO. Returns true if this
4486 succeeded, false otherwise. */
4489 emit_storent_insn (rtx to, rtx from)
4491 enum machine_mode mode = GET_MODE (to), imode;
4492 enum insn_code code = optab_handler (storent_optab, mode);
4495 if (code == CODE_FOR_nothing)
4498 imode = insn_data[code].operand[0].mode;
4499 if (!insn_data[code].operand[0].predicate (to, imode))
4502 imode = insn_data[code].operand[1].mode;
4503 if (!insn_data[code].operand[1].predicate (from, imode))
4505 from = copy_to_mode_reg (imode, from);
4506 if (!insn_data[code].operand[1].predicate (from, imode))
4510 pattern = GEN_FCN (code) (to, from);
4511 if (pattern == NULL_RTX)
4514 emit_insn (pattern);
4518 /* Generate code for computing expression EXP,
4519 and storing the value into TARGET.
4521 If the mode is BLKmode then we may return TARGET itself.
4522 It turns out that in BLKmode it doesn't cause a problem,
4523 because C has no operators that could combine two different
4524 assignments into the same BLKmode object with different values
4525 with no sequence point. Will other languages need this to be more thorough?
4528 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4529 stack, and block moves may need to be treated specially.
4531 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4534 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4537 rtx alt_rtl = NULL_RTX;
4538 location_t loc = EXPR_LOCATION (exp);
4540 if (VOID_TYPE_P (TREE_TYPE (exp)))
4542 /* C++ can generate ?: expressions with a throw expression in one
4543 branch and an rvalue in the other. Here, we resolve attempts to
4544 store the throw expression's nonexistent result. */
4545 gcc_assert (!call_param_p);
4546 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
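/* Editorial illustration: the VOID_TYPE_P case above arises for C++
   conditionals with a throw in one arm, e.g.

     int get (int *p) { return p ? *p : throw "null pointer"; }

   The throw arm has void type and no value; it is expanded purely for
   its control-flow side effects. */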
4549 if (TREE_CODE (exp) == COMPOUND_EXPR)
4551 /* Perform first part of compound expression, then assign from second part. */
4553 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4554 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4555 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4558 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4560 /* For conditional expression, get safe form of the target. Then
4561 test the condition, doing the appropriate assignment on either
4562 side. This avoids the creation of unnecessary temporaries.
4563 For non-BLKmode, it is more efficient not to do this. */
4565 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4567 do_pending_stack_adjust ();
4569 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4570 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4572 emit_jump_insn (gen_jump (lab2));
4575 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4582 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4583 /* If this is a scalar in a register that is stored in a wider mode
4584 than the declared mode, compute the result into its declared mode
4585 and then convert to the wider mode. Our value is the computed expression. */
4588 rtx inner_target = 0;
4590 /* We can do the conversion inside EXP, which will often result
4591 in some optimizations. Do the conversion in two steps: first
4592 change the signedness, if needed, then do the extension. But don't
4593 do this if the type of EXP is a subtype of something else
4594 since then the conversion might involve more than just
4595 converting modes. */
4596 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4597 && TREE_TYPE (TREE_TYPE (exp)) == 0
4598 && GET_MODE_PRECISION (GET_MODE (target))
4599 == TYPE_PRECISION (TREE_TYPE (exp)))
4601 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4602 != SUBREG_PROMOTED_UNSIGNED_P (target))
4604 /* Some types, e.g. Fortran's logical*4, won't have a signed
4605 version, so use the mode instead. */
4607 = (signed_or_unsigned_type_for
4608 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4610 ntype = lang_hooks.types.type_for_mode
4611 (TYPE_MODE (TREE_TYPE (exp)),
4612 SUBREG_PROMOTED_UNSIGNED_P (target));
4614 exp = fold_convert_loc (loc, ntype, exp);
4617 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4618 (GET_MODE (SUBREG_REG (target)),
4619 SUBREG_PROMOTED_UNSIGNED_P (target)),
4622 inner_target = SUBREG_REG (target);
4625 temp = expand_expr (exp, inner_target, VOIDmode,
4626 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4628 /* If TEMP is a VOIDmode constant, use convert_modes to make
4629 sure that we properly convert it. */
4630 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4632 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4633 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4634 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4635 GET_MODE (target), temp,
4636 SUBREG_PROMOTED_UNSIGNED_P (target));
4639 convert_move (SUBREG_REG (target), temp,
4640 SUBREG_PROMOTED_UNSIGNED_P (target));
4644 else if ((TREE_CODE (exp) == STRING_CST
4645 || (TREE_CODE (exp) == MEM_REF
4646 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4647 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4649 && integer_zerop (TREE_OPERAND (exp, 1))))
4650 && !nontemporal && !call_param_p
4653 /* Optimize initialization of an array with a STRING_CST. */
4654 HOST_WIDE_INT exp_len, str_copy_len;
4656 tree str = TREE_CODE (exp) == STRING_CST
4657 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4659 exp_len = int_expr_size (exp);
4663 if (TREE_STRING_LENGTH (str) <= 0)
4666 str_copy_len = strlen (TREE_STRING_POINTER (str));
4667 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4670 str_copy_len = TREE_STRING_LENGTH (str);
4671 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
4672 && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
4674 str_copy_len += STORE_MAX_PIECES - 1;
4675 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4677 str_copy_len = MIN (str_copy_len, exp_len);
4678 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4679 CONST_CAST (char *, TREE_STRING_POINTER (str)),
4680 MEM_ALIGN (target), false))
4685 dest_mem = store_by_pieces (dest_mem,
4686 str_copy_len, builtin_strncpy_read_str,
4688 TREE_STRING_POINTER (str)),
4689 MEM_ALIGN (target), false,
4690 exp_len > str_copy_len ? 1 : 0);
4691 if (exp_len > str_copy_len)
4692 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4693 GEN_INT (exp_len - str_copy_len),
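/* Editorial illustration: for an initializer such as

     char buf[16] = "abc";

   the code above stores the string bytes (rounded up to a
   STORE_MAX_PIECES boundary when the trailing NUL allows) with
   immediate moves via store_by_pieces, then clears whatever remains
   of the 16-byte array with clear_storage. */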
4702 /* If we want to use a nontemporal store, force the value to a register first. */
4704 tmp_target = nontemporal ? NULL_RTX : target;
4705 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4707 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4711 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4712 the same as that of TARGET, adjust the constant. This is needed, for
4713 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4715 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4716 && TREE_CODE (exp) != ERROR_MARK
4717 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4718 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4719 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4721 /* If value was not generated in the target, store it there.
4722 Convert the value to TARGET's type first if necessary and emit the
4723 pending incrementations that have been queued when expanding EXP.
4724 Note that we cannot emit the whole queue blindly because this will
4725 effectively disable the POST_INC optimization later.
4727 If TEMP and TARGET compare equal according to rtx_equal_p, but
4728 one or both of them are volatile memory refs, we have to distinguish two cases:
4730 - expand_expr has used TARGET. In this case, we must not generate
4731 another copy. This can be detected by TARGET being equal according to == .
4733 - expand_expr has not used TARGET - that means that the source just
4734 happens to have the same RTX form. Since temp will have been created
4735 by expand_expr, it will compare unequal according to == .
4736 We must generate a copy in this case, to reach the correct number
4737 of volatile memory references. */
4739 if ((! rtx_equal_p (temp, target)
4740 || (temp != target && (side_effects_p (temp)
4741 || side_effects_p (target))))
4742 && TREE_CODE (exp) != ERROR_MARK
4743 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4744 but TARGET is not a valid memory reference, TEMP will differ
4745 from TARGET although it is really the same location. */
4747 && rtx_equal_p (alt_rtl, target)
4748 && !side_effects_p (alt_rtl)
4749 && !side_effects_p (target))
4750 /* If there's nothing to copy, don't bother. Don't call
4751 expr_size unless necessary, because some front ends' (e.g. C++'s)
4752 expr_size hook must not be given objects that are not
4753 supposed to be bit-copied or bit-initialized. */
4754 && expr_size (exp) != const0_rtx)
4756 if (GET_MODE (temp) != GET_MODE (target)
4757 && GET_MODE (temp) != VOIDmode)
4759 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4760 if (GET_MODE (target) == BLKmode
4761 && GET_MODE (temp) == BLKmode)
4762 emit_block_move (target, temp, expr_size (exp),
4764 ? BLOCK_OP_CALL_PARM
4765 : BLOCK_OP_NORMAL));
4766 else if (GET_MODE (target) == BLKmode)
4767 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
4768 0, GET_MODE (temp), temp);
4770 convert_move (target, temp, unsignedp);
4773 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4775 /* Handle copying a string constant into an array. The string
4776 constant may be shorter than the array. So copy just the string's
4777 actual length, and clear the rest. First get the size of the data
4778 type of the string, which is actually the size of the target. */
4779 rtx size = expr_size (exp);
4781 if (CONST_INT_P (size)
4782 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4783 emit_block_move (target, temp, size,
4785 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4788 enum machine_mode pointer_mode
4789 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4790 enum machine_mode address_mode
4791 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4793 /* Compute the size of the data to copy from the string. */
4795 = size_binop_loc (loc, MIN_EXPR,
4796 make_tree (sizetype, size),
4797 size_int (TREE_STRING_LENGTH (exp)));
4799 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4801 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4804 /* Copy that much. */
4805 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4806 TYPE_UNSIGNED (sizetype));
4807 emit_block_move (target, temp, copy_size_rtx,
4809 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4811 /* Figure out how much is left in TARGET that we have to clear.
4812 Do all calculations in pointer_mode. */
4813 if (CONST_INT_P (copy_size_rtx))
4815 size = plus_constant (size, -INTVAL (copy_size_rtx));
4816 target = adjust_address (target, BLKmode,
4817 INTVAL (copy_size_rtx));
4821 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4822 copy_size_rtx, NULL_RTX, 0,
4825 if (GET_MODE (copy_size_rtx) != address_mode)
4826 copy_size_rtx = convert_to_mode (address_mode,
4828 TYPE_UNSIGNED (sizetype));
4830 target = offset_address (target, copy_size_rtx,
4831 highest_pow2_factor (copy_size));
4832 label = gen_label_rtx ();
4833 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4834 GET_MODE (size), 0, label);
4837 if (size != const0_rtx)
4838 clear_storage (target, size, BLOCK_OP_NORMAL);
4844 /* Handle calls that return values in multiple non-contiguous locations.
4845 The Irix 6 ABI has examples of this. */
4846 else if (GET_CODE (target) == PARALLEL)
4847 emit_group_load (target, temp, TREE_TYPE (exp),
4848 int_size_in_bytes (TREE_TYPE (exp)));
4849 else if (GET_MODE (temp) == BLKmode)
4850 emit_block_move (target, temp, expr_size (exp),
4852 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4853 else if (nontemporal
4854 && emit_storent_insn (target, temp))
4855 /* If we managed to emit a nontemporal store, there is nothing else to do. */
4860 temp = force_operand (temp, target);
4862 emit_move_insn (target, temp);
4869 /* Helper for categorize_ctor_elements. Identical interface. */
4872 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4873 HOST_WIDE_INT *p_elt_count,
4876 unsigned HOST_WIDE_INT idx;
4877 HOST_WIDE_INT nz_elts, elt_count;
4878 tree value, purpose;
4880 /* Whether CTOR is a valid constant initializer, in accordance with what
4881 initializer_constant_valid_p does. If inferred from the constructor
4882 elements, true until proven otherwise. */
4883 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4884 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4889 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4891 HOST_WIDE_INT mult = 1;
4893 if (TREE_CODE (purpose) == RANGE_EXPR)
4895 tree lo_index = TREE_OPERAND (purpose, 0);
4896 tree hi_index = TREE_OPERAND (purpose, 1);
4898 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4899 mult = (tree_low_cst (hi_index, 1)
4900 - tree_low_cst (lo_index, 1) + 1);
4903 switch (TREE_CODE (value))
4907 HOST_WIDE_INT nz = 0, ic = 0;
4910 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4912 nz_elts += mult * nz;
4913 elt_count += mult * ic;
4915 if (const_from_elts_p && const_p)
4916 const_p = const_elt_p;
4923 if (!initializer_zerop (value))
4929 nz_elts += mult * TREE_STRING_LENGTH (value);
4930 elt_count += mult * TREE_STRING_LENGTH (value);
4934 if (!initializer_zerop (TREE_REALPART (value)))
4936 if (!initializer_zerop (TREE_IMAGPART (value)))
4944 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4946 if (!initializer_zerop (TREE_VALUE (v)))
4955 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4958 nz_elts += mult * tc;
4959 elt_count += mult * tc;
4961 if (const_from_elts_p && const_p)
4962 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4970 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4971 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4974 bool clear_this = true;
4976 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4978 /* We don't expect more than one element of the union to be
4979 initialized. Not sure what we should do otherwise... */
4980 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4983 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4984 CONSTRUCTOR_ELTS (ctor),
4987 /* ??? We could look at each element of the union, and find the
4988 largest element, which would avoid comparing the size of the
4989 initialized element against any tail padding in the union.
4990 Doesn't seem worth the effort... */
4991 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4992 TYPE_SIZE (init_sub_type)) == 1)
4994 /* And now we have to find out if the element itself is fully
4995 constructed. E.g. for union { struct { int a, b; } s; } u
4996 = { .s = { .a = 1 } }. */
4997 if (elt_count == count_type_elements (init_sub_type, false))
5002 *p_must_clear = clear_this;
5005 *p_nz_elts += nz_elts;
5006 *p_elt_count += elt_count;
5011 /* Examine CTOR to discover:
5012 * how many scalar fields are set to nonzero values,
5013 and place it in *P_NZ_ELTS;
5014 * how many scalar fields in total are in CTOR,
5015 and place it in *P_ELT_COUNT;
5016 * if a type is a union, and the initializer from the constructor
5017 is not the largest element in the union, then set *P_MUST_CLEAR.
5019 Return whether or not CTOR is a valid static constant initializer, the same
5020 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5023 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5024 HOST_WIDE_INT *p_elt_count,
5029 *p_must_clear = false;
5032 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5035 /* Count the number of scalars in TYPE. Return -1 on overflow or if TYPE is
5036 variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
5037 array member at the end of the structure. */
5040 count_type_elements (const_tree type, bool allow_flexarr)
5042 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5043 switch (TREE_CODE (type))
5047 tree telts = array_type_nelts (type);
5048 if (telts && host_integerp (telts, 1))
5050 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5051 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5054 else if (max / n > m)
5062 HOST_WIDE_INT n = 0, t;
5065 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5066 if (TREE_CODE (f) == FIELD_DECL)
5068 t = count_type_elements (TREE_TYPE (f), false);
5071 /* Check for structures with a flexible array member. */
5072 tree tf = TREE_TYPE (f);
5074 && DECL_CHAIN (f) == NULL
5075 && TREE_CODE (tf) == ARRAY_TYPE
5077 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5078 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5079 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5080 && int_size_in_bytes (type) >= 0)
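/* Editorial illustration: with ALLOW_FLEXARR set, a structure ending
   in a C99 flexible array member, e.g.

     struct msg { int len; char data[]; };

   is counted as having just one scalar (LEN); DATA has no fixed
   length and would otherwise make the count fail. */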
5092 case QUAL_UNION_TYPE:
5099 return TYPE_VECTOR_SUBPARTS (type);
5103 case FIXED_POINT_TYPE:
5108 case REFERENCE_TYPE:
5123 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
5126 mostly_zeros_p (const_tree exp)
5128 if (TREE_CODE (exp) == CONSTRUCTOR)
5131 HOST_WIDE_INT nz_elts, count, elts;
5134 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5138 elts = count_type_elements (TREE_TYPE (exp), false);
5140 return nz_elts < elts / 4;
5143 return initializer_zerop (exp);
5146 /* Return 1 if EXP contains all zeros. */
5149 all_zeros_p (const_tree exp)
5151 if (TREE_CODE (exp) == CONSTRUCTOR)
5154 HOST_WIDE_INT nz_elts, count;
5157 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5158 return nz_elts == 0;
5161 return initializer_zerop (exp);
5164 /* Helper function for store_constructor.
5165 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5166 TYPE is the type of the CONSTRUCTOR, not the element type.
5167 CLEARED is as for store_constructor.
5168 ALIAS_SET is the alias set to use for any stores.
5170 This provides a recursive shortcut back to store_constructor when it isn't
5171 necessary to go through store_field. This is so that we can pass through
5172 the cleared field to let store_constructor know that we may not have to
5173 clear a substructure if the outer structure has already been cleared. */
5176 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5177 HOST_WIDE_INT bitpos, enum machine_mode mode,
5178 tree exp, tree type, int cleared,
5179 alias_set_type alias_set)
5181 if (TREE_CODE (exp) == CONSTRUCTOR
5182 /* We can only call store_constructor recursively if the size and
5183 bit position are on a byte boundary. */
5184 && bitpos % BITS_PER_UNIT == 0
5185 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5186 /* If we have a nonzero bitpos for a register target, then we just
5187 let store_field do the bitfield handling. This is unlikely to
5188 generate unnecessary clear instructions anyway. */
5189 && (bitpos == 0 || MEM_P (target)))
5193 = adjust_address (target,
5194 GET_MODE (target) == BLKmode
5196 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5197 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5200 /* Update the alias set, if required. */
5201 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5202 && MEM_ALIAS_SET (target) != 0)
5204 target = copy_rtx (target);
5205 set_mem_alias_set (target, alias_set);
5208 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5211 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5214 /* Store the value of constructor EXP into the rtx TARGET.
5215 TARGET is either a REG or a MEM; we know it cannot conflict, since
5216 safe_from_p has been called.
5217 CLEARED is true if TARGET is known to have been zeroed.
5218 SIZE is the number of bytes of TARGET we are allowed to modify: this
5219 may not be the same as the size of EXP if we are assigning to a field
5220 which has been packed to exclude padding bits. */
5223 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5225 tree type = TREE_TYPE (exp);
5226 #ifdef WORD_REGISTER_OPERATIONS
5227 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5230 switch (TREE_CODE (type))
5234 case QUAL_UNION_TYPE:
5236 unsigned HOST_WIDE_INT idx;
5239 /* If size is zero or the target is already cleared, do nothing. */
5240 if (size == 0 || cleared)
5242 /* We either clear the aggregate or indicate the value is dead. */
5243 else if ((TREE_CODE (type) == UNION_TYPE
5244 || TREE_CODE (type) == QUAL_UNION_TYPE)
5245 && ! CONSTRUCTOR_ELTS (exp))
5246 /* If the constructor is empty, clear the union. */
5248 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5252 /* If we are building a static constructor into a register,
5253 set the initial value as zero so we can fold the value into
5254 a constant. But if more than one register is involved,
5255 this probably loses. */
5256 else if (REG_P (target) && TREE_STATIC (exp)
5257 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5259 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5263 /* If the constructor has fewer fields than the structure or
5264 if we are initializing the structure to mostly zeros, clear
5265 the whole structure first. Don't do this if TARGET is a
5266 register whose mode size isn't equal to SIZE since
5267 clear_storage can't handle this case. */
5269 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5270 != fields_length (type))
5271 || mostly_zeros_p (exp))
5273 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5276 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5280 if (REG_P (target) && !cleared)
5281 emit_clobber (target);
5283 /* Store each element of the constructor into the
5284 corresponding field of TARGET. */
5285 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5287 enum machine_mode mode;
5288 HOST_WIDE_INT bitsize;
5289 HOST_WIDE_INT bitpos = 0;
5291 rtx to_rtx = target;
5293 /* Just ignore missing fields. We cleared the whole
5294 structure, above, if any fields are missing. */
5298 if (cleared && initializer_zerop (value))
5301 if (host_integerp (DECL_SIZE (field), 1))
5302 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5306 mode = DECL_MODE (field);
5307 if (DECL_BIT_FIELD (field))
5310 offset = DECL_FIELD_OFFSET (field);
5311 if (host_integerp (offset, 0)
5312 && host_integerp (bit_position (field), 0))
5314 bitpos = int_bit_position (field);
5318 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5322 enum machine_mode address_mode;
5326 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5327 make_tree (TREE_TYPE (exp),
5330 offset_rtx = expand_normal (offset);
5331 gcc_assert (MEM_P (to_rtx));
5334 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5335 if (GET_MODE (offset_rtx) != address_mode)
5336 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5338 to_rtx = offset_address (to_rtx, offset_rtx,
5339 highest_pow2_factor (offset));
5342 #ifdef WORD_REGISTER_OPERATIONS
5343 /* If this initializes a field that is smaller than a
5344 word, at the start of a word, try to widen it to a full
5345 word. This special case allows us to output C++ member
5346 function initializations in a form that the optimizers can understand. */
5349 && bitsize < BITS_PER_WORD
5350 && bitpos % BITS_PER_WORD == 0
5351 && GET_MODE_CLASS (mode) == MODE_INT
5352 && TREE_CODE (value) == INTEGER_CST
5354 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5356 tree type = TREE_TYPE (value);
5358 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5360 type = lang_hooks.types.type_for_size
5361 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5362 value = fold_convert (type, value);
5365 if (BYTES_BIG_ENDIAN)
5367 = fold_build2 (LSHIFT_EXPR, type, value,
5368 build_int_cst (type,
5369 BITS_PER_WORD - bitsize));
5370 bitsize = BITS_PER_WORD;
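/* Editorial illustration (WORD_REGISTER_OPERATIONS targets only):
   when expanding a constructor like

     struct s { unsigned char tag; unsigned char pad[3]; };
     struct s r = { 1 };

   with R in a register, the byte-sized constant store of TAG at bit
   offset 0 is widened to a full-word constant store, which later
   optimizers handle much better than a bitfield insertion. */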
5375 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5376 && DECL_NONADDRESSABLE_P (field))
5378 to_rtx = copy_rtx (to_rtx);
5379 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5382 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5383 value, type, cleared,
5384 get_alias_set (TREE_TYPE (field)));
5391 unsigned HOST_WIDE_INT i;
5394 tree elttype = TREE_TYPE (type);
5396 HOST_WIDE_INT minelt = 0;
5397 HOST_WIDE_INT maxelt = 0;
5399 domain = TYPE_DOMAIN (type);
5400 const_bounds_p = (TYPE_MIN_VALUE (domain)
5401 && TYPE_MAX_VALUE (domain)
5402 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5403 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5405 /* If we have constant bounds for the range of the type, get them. */
5408 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5409 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5412 /* If the constructor has fewer elements than the array, clear
5413 the whole array first. Similarly if this is a static
5414 constructor of a non-BLKmode object. */
5417 else if (REG_P (target) && TREE_STATIC (exp))
5421 unsigned HOST_WIDE_INT idx;
5423 HOST_WIDE_INT count = 0, zero_count = 0;
5424 need_to_clear = ! const_bounds_p;
5426 /* This loop is a more accurate version of the loop in
5427 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5428 is also needed to check for missing elements. */
5429 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5431 HOST_WIDE_INT this_node_count;
5436 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5438 tree lo_index = TREE_OPERAND (index, 0);
5439 tree hi_index = TREE_OPERAND (index, 1);
5441 if (! host_integerp (lo_index, 1)
5442 || ! host_integerp (hi_index, 1))
5448 this_node_count = (tree_low_cst (hi_index, 1)
5449 - tree_low_cst (lo_index, 1) + 1);
5452 this_node_count = 1;
5454 count += this_node_count;
5455 if (mostly_zeros_p (value))
5456 zero_count += this_node_count;
5459 /* Clear the entire array first if there are any missing
5460 elements, or if the incidence of zero elements is >= 75%. */
5463 && (count < maxelt - minelt + 1
5464 || 4 * zero_count >= 3 * count))
5468 if (need_to_clear && size > 0)
5471 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5473 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
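/* Editorial illustration: for a sparse initializer such as

     int a[100] = { [3] = 7 };

   the constructor has missing elements (and is mostly zeros), so the
   whole array is cleared with one clear_storage call and only the
   nonzero element is stored individually. */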
5477 if (!cleared && REG_P (target))
5478 /* Inform later passes that the old value is dead. */
5479 emit_clobber (target);
5481 /* Store each element of the constructor into the
5482 corresponding element of TARGET, determined by counting the elements. */
5484 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5486 enum machine_mode mode;
5487 HOST_WIDE_INT bitsize;
5488 HOST_WIDE_INT bitpos;
5489 rtx xtarget = target;
5491 if (cleared && initializer_zerop (value))
5494 mode = TYPE_MODE (elttype);
5495 if (mode == BLKmode)
5496 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5497 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5500 bitsize = GET_MODE_BITSIZE (mode);
5502 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5504 tree lo_index = TREE_OPERAND (index, 0);
5505 tree hi_index = TREE_OPERAND (index, 1);
5506 rtx index_r, pos_rtx;
5507 HOST_WIDE_INT lo, hi, count;
5510 /* If the range is constant and "small", unroll the loop. */
5512 && host_integerp (lo_index, 0)
5513 && host_integerp (hi_index, 0)
5514 && (lo = tree_low_cst (lo_index, 0),
5515 hi = tree_low_cst (hi_index, 0),
5516 count = hi - lo + 1,
5519 || (host_integerp (TYPE_SIZE (elttype), 1)
5520 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5523 lo -= minelt; hi -= minelt;
5524 for (; lo <= hi; lo++)
5526 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5529 && !MEM_KEEP_ALIAS_SET_P (target)
5530 && TREE_CODE (type) == ARRAY_TYPE
5531 && TYPE_NONALIASED_COMPONENT (type))
5533 target = copy_rtx (target);
5534 MEM_KEEP_ALIAS_SET_P (target) = 1;
5537 store_constructor_field
5538 (target, bitsize, bitpos, mode, value, type, cleared,
5539 get_alias_set (elttype));
5544 rtx loop_start = gen_label_rtx ();
5545 rtx loop_end = gen_label_rtx ();
5548 expand_normal (hi_index);
5550 index = build_decl (EXPR_LOCATION (exp),
5551 VAR_DECL, NULL_TREE, domain);
5552 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5553 SET_DECL_RTL (index, index_r);
5554 store_expr (lo_index, index_r, 0, false);
5556 /* Build the head of the loop. */
5557 do_pending_stack_adjust ();
5558 emit_label (loop_start);
5560 /* Assign value to element index. */
5562 fold_convert (ssizetype,
5563 fold_build2 (MINUS_EXPR,
5566 TYPE_MIN_VALUE (domain)));
5569 size_binop (MULT_EXPR, position,
5570 fold_convert (ssizetype,
5571 TYPE_SIZE_UNIT (elttype)));
5573 pos_rtx = expand_normal (position);
5574 xtarget = offset_address (target, pos_rtx,
5575 highest_pow2_factor (position));
5576 xtarget = adjust_address (xtarget, mode, 0);
5577 if (TREE_CODE (value) == CONSTRUCTOR)
5578 store_constructor (value, xtarget, cleared,
5579 bitsize / BITS_PER_UNIT);
5581 store_expr (value, xtarget, 0, false);
5583 /* Generate a conditional jump to exit the loop. */
5584 exit_cond = build2 (LT_EXPR, integer_type_node,
5586 jumpif (exit_cond, loop_end, -1);
5588 /* Update the loop counter, and jump to the head of the loop. */
5590 expand_assignment (index,
5591 build2 (PLUS_EXPR, TREE_TYPE (index),
5592 index, integer_one_node),
5595 emit_jump (loop_start);
5597 /* Build the end of the loop. */
5598 emit_label (loop_end);
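/* Editorial illustration: a GNU range designator spanning too many
   elements to unroll, e.g.

     int a[1024] = { [0 ... 1023] = 5 };

   is expanded by the code above into an actual loop: an index
   register initialized from LO_INDEX, a store through an address
   computed from it, and a conditional jump back until HI_INDEX is
   passed. */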
5601 else if ((index != 0 && ! host_integerp (index, 0))
5602 || ! host_integerp (TYPE_SIZE (elttype), 1))
5607 index = ssize_int (1);
5610 index = fold_convert (ssizetype,
5611 fold_build2 (MINUS_EXPR,
5614 TYPE_MIN_VALUE (domain)));
5617 size_binop (MULT_EXPR, index,
5618 fold_convert (ssizetype,
5619 TYPE_SIZE_UNIT (elttype)));
5620 xtarget = offset_address (target,
5621 expand_normal (position),
5622 highest_pow2_factor (position));
5623 xtarget = adjust_address (xtarget, mode, 0);
5624 store_expr (value, xtarget, 0, false);
5629 bitpos = ((tree_low_cst (index, 0) - minelt)
5630 * tree_low_cst (TYPE_SIZE (elttype), 1));
5632 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5634 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5635 && TREE_CODE (type) == ARRAY_TYPE
5636 && TYPE_NONALIASED_COMPONENT (type))
5638 target = copy_rtx (target);
5639 MEM_KEEP_ALIAS_SET_P (target) = 1;
5641 store_constructor_field (target, bitsize, bitpos, mode, value,
5642 type, cleared, get_alias_set (elttype));
5650 unsigned HOST_WIDE_INT idx;
5651 constructor_elt *ce;
5655 tree elttype = TREE_TYPE (type);
5656 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5657 enum machine_mode eltmode = TYPE_MODE (elttype);
5658 HOST_WIDE_INT bitsize;
5659 HOST_WIDE_INT bitpos;
5660 rtvec vector = NULL;
5662 alias_set_type alias;
5664 gcc_assert (eltmode != BLKmode);
5666 n_elts = TYPE_VECTOR_SUBPARTS (type);
5667 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5669 enum machine_mode mode = GET_MODE (target);
5671 icode = (int) optab_handler (vec_init_optab, mode);
5672 if (icode != CODE_FOR_nothing)
5676 vector = rtvec_alloc (n_elts);
5677 for (i = 0; i < n_elts; i++)
5678 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5682 /* If the constructor has fewer elements than the vector,
5683 clear the whole vector first. Similarly if this is a static
5684 constructor of a non-BLKmode object. */
5687 else if (REG_P (target) && TREE_STATIC (exp))
5691 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5694 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5696 int n_elts_here = tree_low_cst
5697 (int_const_binop (TRUNC_DIV_EXPR,
5698 TYPE_SIZE (TREE_TYPE (value)),
5699 TYPE_SIZE (elttype), 0), 1);
5701 count += n_elts_here;
5702 if (mostly_zeros_p (value))
5703 zero_count += n_elts_here;
5706 /* Clear the entire vector first if there are any missing elements,
5707 or if the incidence of zero elements is >= 75%. */
5708 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5711 if (need_to_clear && size > 0 && !vector)
5714 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5716 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5720 /* Inform later passes that the old value is dead. */
5721 if (!cleared && !vector && REG_P (target))
5722 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5725 alias = MEM_ALIAS_SET (target);
5727 alias = get_alias_set (elttype);
5729 /* Store each element of the constructor into the corresponding
5730 element of TARGET, determined by counting the elements. */
5731 for (idx = 0, i = 0;
5732 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5733 idx++, i += bitsize / elt_size)
5735 HOST_WIDE_INT eltpos;
5736 tree value = ce->value;
5738 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5739 if (cleared && initializer_zerop (value))
5743 eltpos = tree_low_cst (ce->index, 1);
5749 /* Vector CONSTRUCTORs should only be built from smaller
5750 vectors in the case of BLKmode vectors. */
5751 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5752 RTVEC_ELT (vector, eltpos)
5753 = expand_normal (value);
5757 enum machine_mode value_mode =
5758 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5759 ? TYPE_MODE (TREE_TYPE (value))
5761 bitpos = eltpos * elt_size;
5762 store_constructor_field (target, bitsize, bitpos,
5763 value_mode, value, type,
5769 emit_insn (GEN_FCN (icode)
5771 gen_rtx_PARALLEL (GET_MODE (target), vector)));
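/* Editorial illustration (assumes a target providing a vec_init<mode>
   pattern): a vector constructor such as

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si make (int a, int b, int c, int d)
     {
       return (v4si) { a, b, c, d };
     }

   expands each element, collects them in VECTOR, and emits a single
   vec_init insn taking the PARALLEL of elements. */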
5780 /* Store the value of EXP (an expression tree)
5781 into a subfield of TARGET which has mode MODE and occupies
5782 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5783 If MODE is VOIDmode, it means that we are storing into a bit-field.
5785 Always return const0_rtx unless we have something particular to return.
5788 TYPE is the type of the underlying object,
5790 ALIAS_SET is the alias set for the destination. This value will
5791 (in general) be different from that for TARGET, since TARGET is a
5792 reference to the containing structure.
5794 If NONTEMPORAL is true, try generating a nontemporal store. */
5797 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5798 enum machine_mode mode, tree exp, tree type,
5799 alias_set_type alias_set, bool nontemporal)
5801 if (TREE_CODE (exp) == ERROR_MARK)
5804 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5807 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5809 /* If we are storing into an unaligned field of an aligned union that is
5810 in a register, we may have the mode of TARGET being an integer mode but
5811 MODE == BLKmode. In that case, get an aligned object whose size and
5812 alignment are the same as TARGET and store TARGET into it (we can avoid
5813 the store if the field being stored is the entire width of TARGET). Then
5814 call ourselves recursively to store the field into a BLKmode version of
5815 that object. Finally, load from the object into TARGET. This is not
5816 very efficient in general, but should only be slightly more expensive
5817 than the otherwise-required unaligned accesses. Perhaps this can be
5818 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5819 twice, once with emit_move_insn and once via store_field. */
5822 && (REG_P (target) || GET_CODE (target) == SUBREG))
5824 rtx object = assign_temp (type, 0, 1, 1);
5825 rtx blk_object = adjust_address (object, BLKmode, 0);
5827 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5828 emit_move_insn (object, target);
5830 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5833 emit_move_insn (target, object);
5835 /* We want to return the BLKmode version of the data. */
5839 if (GET_CODE (target) == CONCAT)
5841 /* We're storing into a struct containing a single __complex. */
5843 gcc_assert (!bitpos);
5844 return store_expr (exp, target, 0, nontemporal);
5847 /* If the structure is in a register or if the component
5848 is a bit field, we cannot use addressing to access it.
5849 Use bit-field techniques or SUBREG to store in it. */
5851 if (mode == VOIDmode
5852 || (mode != BLKmode && ! direct_store[(int) mode]
5853 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5854 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5856 || GET_CODE (target) == SUBREG
5857 /* If the field isn't aligned enough to store as an ordinary memref,
5858 store it as a bit field. */
5860 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5861 || bitpos % GET_MODE_ALIGNMENT (mode))
5862 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5863 || (bitpos % BITS_PER_UNIT != 0)))
5864 /* If the RHS and field are a constant size and the size of the
5865 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5868 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5869 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5870 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5871 decl we must use bitfield operations. */
5873 && TREE_CODE (exp) == MEM_REF
5874 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5875 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5876 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5877 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5882 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5883 implies a mask operation. If the precision is the same size as
5884 the field we're storing into, that mask is redundant. This is
5885 particularly common with bit field assignments generated by the C++ front end. */
5887 nop_def = get_def_for_expr (exp, NOP_EXPR);
5890 tree type = TREE_TYPE (exp);
5891 if (INTEGRAL_TYPE_P (type)
5892 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5893 && bitsize == TYPE_PRECISION (type))
5895 tree op = gimple_assign_rhs1 (nop_def);
5896 type = TREE_TYPE (op);
5897 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5902 temp = expand_normal (exp);
5904 /* If BITSIZE is narrower than the size of the type of EXP
5905 we will be narrowing TEMP. Normally, what's wanted are the
5906 low-order bits. However, if EXP's type is a record and this is
5907 a big-endian machine, we want the upper BITSIZE bits. */
5908 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5909 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5910 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5911 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5912 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5916 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5918 if (mode != VOIDmode && mode != BLKmode
5919 && mode != TYPE_MODE (TREE_TYPE (exp)))
5920 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5922 /* If the modes of TEMP and TARGET are both BLKmode, both
5923 must be in memory and BITPOS must be aligned on a byte
5924 boundary. If so, we simply do a block copy. Likewise
5925 for a BLKmode-like TARGET. */
5926 if (GET_MODE (temp) == BLKmode
5927 && (GET_MODE (target) == BLKmode
5929 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5930 && (bitpos % BITS_PER_UNIT) == 0
5931 && (bitsize % BITS_PER_UNIT) == 0)))
5933 gcc_assert (MEM_P (target) && MEM_P (temp)
5934 && (bitpos % BITS_PER_UNIT) == 0);
5936 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5937 emit_block_move (target, temp,
5938 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5945 /* Store the value in the bitfield. */
5946 store_bit_field (target, bitsize, bitpos, mode, temp);
5952 /* Now build a reference to just the desired component. */
5953 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5955 if (to_rtx == target)
5956 to_rtx = copy_rtx (to_rtx);
5958 if (!MEM_SCALAR_P (to_rtx))
5959 MEM_IN_STRUCT_P (to_rtx) = 1;
5960 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5961 set_mem_alias_set (to_rtx, alias_set);
5963 return store_expr (exp, to_rtx, 0, nontemporal);
5967 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5968 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5969 codes and find the ultimate containing object, which we return.
5971 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5972 bit position, and *PUNSIGNEDP to the signedness of the field.
5973 If the position of the field is variable, we store a tree
5974 giving the variable offset (in units) in *POFFSET.
5975 This offset is in addition to the bit position.
5976 If the position is not variable, we store 0 in *POFFSET.
5978 If any of the extraction expressions is volatile,
5979 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5981 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5982 Otherwise, it is a mode that can be used to access the field.
5984 If the field describes a variable-sized object, *PMODE is set to
5985 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5986 this case, but the address of the object can be found.
5988 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5989 look through nodes that serve as markers of a greater alignment than
5990 the one that can be deduced from the expression. These nodes make it
5991 possible for front-ends to prevent temporaries from being created by
5992 the middle-end on alignment considerations. For that purpose, the
5993 normal operating mode at high-level is to always pass FALSE so that
5994 the ultimate containing object is really returned; moreover, the
5995 associated predicate handled_component_p will always return TRUE
5996 on these nodes, thus indicating that they are essentially handled
5997 by get_inner_reference. TRUE should only be passed when the caller
5998 is scanning the expression in order to build another representation
5999 and specifically knows how to handle these nodes; as such, this is
6000 the normal operating mode in the RTL expanders. */
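/* Editorial illustration of the outputs: for a bit-field access such
   as

     struct s { int pad; unsigned int f : 5; };
     ... p->f ...          with P of type struct s *

   get_inner_reference returns the dereference of P as the containing
   object and sets *PBITSIZE = 5, *PBITPOS = 32 (assuming 32-bit int),
   *POFFSET = NULL_TREE, *PMODE = VOIDmode (a non-BLKmode bit-field)
   and *PUNSIGNEDP = 1. */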
6003 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6004 HOST_WIDE_INT *pbitpos, tree *poffset,
6005 enum machine_mode *pmode, int *punsignedp,
6006 int *pvolatilep, bool keep_aligning)
6009 enum machine_mode mode = VOIDmode;
6010 bool blkmode_bitfield = false;
6011 tree offset = size_zero_node;
6012 double_int bit_offset = double_int_zero;
6014 /* First get the mode, signedness, and size. We do this from just the
6015 outermost expression. */
6017 if (TREE_CODE (exp) == COMPONENT_REF)
6019 tree field = TREE_OPERAND (exp, 1);
6020 size_tree = DECL_SIZE (field);
6021 if (!DECL_BIT_FIELD (field))
6022 mode = DECL_MODE (field);
6023 else if (DECL_MODE (field) == BLKmode)
6024 blkmode_bitfield = true;
6025 else if (TREE_THIS_VOLATILE (exp)
6026 && flag_strict_volatile_bitfields > 0)
6027 /* Volatile bitfields should be accessed in the mode of the
6028 field's type, not the mode computed based on the bit size. */
6030 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6032 *punsignedp = DECL_UNSIGNED (field);
6034 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6036 size_tree = TREE_OPERAND (exp, 1);
6037 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6038 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6040 /* For vector types, with the correct size of access, use the mode of the inner type. */
6042 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6043 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6044 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6045 mode = TYPE_MODE (TREE_TYPE (exp));
6049 mode = TYPE_MODE (TREE_TYPE (exp));
6050 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6052 if (mode == BLKmode)
6053 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6055 *pbitsize = GET_MODE_BITSIZE (mode);
6060 if (! host_integerp (size_tree, 1))
6061 mode = BLKmode, *pbitsize = -1;
6063 *pbitsize = tree_low_cst (size_tree, 1);
6066 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6067 and find the ultimate containing object. */
6070 switch (TREE_CODE (exp))
6074 = double_int_add (bit_offset,
6075 tree_to_double_int (TREE_OPERAND (exp, 2)));
6080 tree field = TREE_OPERAND (exp, 1);
6081 tree this_offset = component_ref_field_offset (exp);
6083 /* If this field hasn't been filled in yet, don't go past it.
6084 This should only happen when folding expressions made during
6085 type construction. */
6086 if (this_offset == 0)
6089 offset = size_binop (PLUS_EXPR, offset, this_offset);
6090 bit_offset = double_int_add (bit_offset,
6092 (DECL_FIELD_BIT_OFFSET (field)));
6094 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6099 case ARRAY_RANGE_REF:
6101 tree index = TREE_OPERAND (exp, 1);
6102 tree low_bound = array_ref_low_bound (exp);
6103 tree unit_size = array_ref_element_size (exp);
6105 /* We assume all arrays have sizes that are a multiple of a byte.
6106 First subtract the lower bound, if any, in the type of the
6107 index, then convert to sizetype and multiply by the size of
6108 the array element. */
6109 if (! integer_zerop (low_bound))
6110 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6113 offset = size_binop (PLUS_EXPR, offset,
6114 size_binop (MULT_EXPR,
6115 fold_convert (sizetype, index),
6124 bit_offset = double_int_add (bit_offset,
6125 uhwi_to_double_int (*pbitsize));
6128 case VIEW_CONVERT_EXPR:
6129 if (keep_aligning && STRICT_ALIGNMENT
6130 && (TYPE_ALIGN (TREE_TYPE (exp))
6131 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6132 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6133 < BIGGEST_ALIGNMENT)
6134 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6135 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6140 /* Hand back the decl for MEM[&decl, off]. */
6141 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6143 tree off = TREE_OPERAND (exp, 1);
6144 if (!integer_zerop (off))
6146 double_int boff, coff = mem_ref_offset (exp);
6147 boff = double_int_lshift (coff,
6149 ? 3 : exact_log2 (BITS_PER_UNIT),
6150 HOST_BITS_PER_DOUBLE_INT, true);
6151 bit_offset = double_int_add (bit_offset, boff);
6153 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6161 /* If any reference in the chain is volatile, the effect is volatile. */
6162 if (TREE_THIS_VOLATILE (exp))
6165 exp = TREE_OPERAND (exp, 0);
6169 /* If OFFSET is constant, see if we can return the whole thing as a
6170 constant bit position. Make sure to handle overflow during this conversion. */
6172 if (host_integerp (offset, 0))
6174 double_int tem = double_int_lshift (tree_to_double_int (offset),
6176 ? 3 : exact_log2 (BITS_PER_UNIT),
6177 HOST_BITS_PER_DOUBLE_INT, true);
6178 tem = double_int_add (tem, bit_offset);
6179 if (double_int_fits_in_shwi_p (tem))
6181 *pbitpos = double_int_to_shwi (tem);
6182 *poffset = offset = NULL_TREE;
6186 /* Otherwise, split it up. */
6189 *pbitpos = double_int_to_shwi (bit_offset);
6193 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6194 if (mode == VOIDmode
6196 && (*pbitpos % BITS_PER_UNIT) == 0
6197 && (*pbitsize % BITS_PER_UNIT) == 0)
6205 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6206 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6207 EXP is marked as PACKED. */
6210 contains_packed_reference (const_tree exp)
6212 bool packed_p = false;
6216 switch (TREE_CODE (exp))
6220 tree field = TREE_OPERAND (exp, 1);
6221 packed_p = DECL_PACKED (field)
6222 || TYPE_PACKED (TREE_TYPE (field))
6223 || TYPE_PACKED (TREE_TYPE (exp));
6231 case ARRAY_RANGE_REF:
6234 case VIEW_CONVERT_EXPR:
6240 exp = TREE_OPERAND (exp, 0);
6246 /* Return a tree of sizetype representing the size, in bytes, of the element
6247 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6250 array_ref_element_size (tree exp)
6252 tree aligned_size = TREE_OPERAND (exp, 3);
6253 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6254 location_t loc = EXPR_LOCATION (exp);
6256 /* If a size was specified in the ARRAY_REF, it's the size measured
6257 in alignment units of the element type. So multiply by that value. */
6258 if (aligned_size)
6260 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6261 sizetype from another type of the same width and signedness. */
6262 if (TREE_TYPE (aligned_size) != sizetype)
6263 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6264 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6265 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6268 /* Otherwise, take the size from that of the element type. Substitute
6269 any PLACEHOLDER_EXPR that we have. */
6271 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
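/* Illustrative note (added commentary, not in the original source): for a
   plain C array "int a[10]", operand 3 of the ARRAY_REF is absent, so the
   function above just returns TYPE_SIZE_UNIT of the element type (4 for a
   32-bit int).  The operand-3 path matters for front ends such as Ada,
   where element sizes can involve PLACEHOLDER_EXPRs.  */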
6274 /* Return a tree representing the lower bound of the array mentioned in
6275 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6278 array_ref_low_bound (tree exp)
6280 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6282 /* If a lower bound is specified in EXP, use it. */
6283 if (TREE_OPERAND (exp, 2))
6284 return TREE_OPERAND (exp, 2);
6286 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6287 substituting for a PLACEHOLDER_EXPR as needed. */
6288 if (domain_type && TYPE_MIN_VALUE (domain_type))
6289 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6291 /* Otherwise, return a zero of the appropriate type. */
6292 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
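/* Illustrative note (added commentary, not in the original source): C
   arrays are zero-based, so this usually returns integer zero; a front
   end with non-zero-based arrays (e.g. Ada's "array (2 .. 10)") gets the
   domain's TYPE_MIN_VALUE, here 2, instead.  */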
6295 /* Return a tree representing the upper bound of the array mentioned in
6296 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6299 array_ref_up_bound (tree exp)
6301 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6303 /* If there is a domain type and it has an upper bound, use it, substituting
6304 for a PLACEHOLDER_EXPR as needed. */
6305 if (domain_type && TYPE_MAX_VALUE (domain_type))
6306 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6308 /* Otherwise fail. */
6309 return NULL_TREE;
6312 /* Return a tree representing the offset, in bytes, of the field referenced
6313 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6316 component_ref_field_offset (tree exp)
6318 tree aligned_offset = TREE_OPERAND (exp, 2);
6319 tree field = TREE_OPERAND (exp, 1);
6320 location_t loc = EXPR_LOCATION (exp);
6322 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6323 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6324 value. */
6325 if (aligned_offset)
6327 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6328 sizetype from another type of the same width and signedness. */
6329 if (TREE_TYPE (aligned_offset) != sizetype)
6330 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6331 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6332 size_int (DECL_OFFSET_ALIGN (field)
6333 / BITS_PER_UNIT));
6336 /* Otherwise, take the offset from that of the field. Substitute
6337 any PLACEHOLDER_EXPR that we have. */
6339 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6342 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6344 static unsigned HOST_WIDE_INT
6345 target_align (const_tree target)
6347 /* We might have a chain of nested references with intermediate misaligning
6348 bitfield components, so we need to recurse to find out. */
6350 unsigned HOST_WIDE_INT this_align, outer_align;
6352 switch (TREE_CODE (target))
6354 case BIT_FIELD_REF:
6355 return 1;
6357 case COMPONENT_REF:
6358 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6359 outer_align = target_align (TREE_OPERAND (target, 0));
6360 return MIN (this_align, outer_align);
6362 case ARRAY_REF:
6363 case ARRAY_RANGE_REF:
6364 this_align = TYPE_ALIGN (TREE_TYPE (target));
6365 outer_align = target_align (TREE_OPERAND (target, 0));
6366 return MIN (this_align, outer_align);
6368 CASE_CONVERT:
6369 case NON_LVALUE_EXPR:
6370 case VIEW_CONVERT_EXPR:
6371 this_align = TYPE_ALIGN (TREE_TYPE (target));
6372 outer_align = target_align (TREE_OPERAND (target, 0));
6373 return MAX (this_align, outer_align);
6375 default:
6376 return TYPE_ALIGN (TREE_TYPE (target));
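/* Illustrative note (added commentary, not in the original source): for a
   target like s.inner.field, the MIN chain above means one poorly aligned
   COMPONENT_REF in the middle caps the alignment assumed for the whole
   store, while conversions take the MAX because they merely reinterpret
   the same object.  */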
6381 /* Given an rtx VALUE that may contain additions and multiplications, return
6382 an equivalent value that just refers to a register, memory, or constant.
6383 This is done by generating instructions to perform the arithmetic and
6384 returning a pseudo-register containing the value.
6386 The returned value may be a REG, SUBREG, MEM or constant. */
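/* Illustrative example (added commentary, not in the original source):
   called on VALUE = (plus:SI (mult:SI (reg:SI 100) (const_int 4))
   (reg:SI 101)), force_operand emits the multiply and add insns and
   returns a pseudo register holding the sum, so the caller ends up with
   a plain operand instead of a compound rtx.  */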
6389 force_operand (rtx value, rtx target)
6392 /* Use subtarget as the target for operand 0 of a binary operation. */
6393 rtx subtarget = get_subtarget (target);
6394 enum rtx_code code = GET_CODE (value);
6396 /* Check for subreg applied to an expression produced by loop optimizer. */
6397 if (code == SUBREG
6398 && !REG_P (SUBREG_REG (value))
6399 && !MEM_P (SUBREG_REG (value)))
6401 value
6402 = simplify_gen_subreg (GET_MODE (value),
6403 force_reg (GET_MODE (SUBREG_REG (value)),
6404 force_operand (SUBREG_REG (value),
6405 NULL_RTX)),
6406 GET_MODE (SUBREG_REG (value)),
6407 SUBREG_BYTE (value));
6408 code = GET_CODE (value);
6411 /* Check for a PIC address load. */
6412 if ((code == PLUS || code == MINUS)
6413 && XEXP (value, 0) == pic_offset_table_rtx
6414 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6415 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6416 || GET_CODE (XEXP (value, 1)) == CONST))
6418 if (!subtarget)
6419 subtarget = gen_reg_rtx (GET_MODE (value));
6420 emit_move_insn (subtarget, value);
6421 return subtarget;
6424 if (ARITHMETIC_P (value))
6426 op2 = XEXP (value, 1);
6427 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6428 subtarget = 0;
6429 if (code == MINUS && CONST_INT_P (op2))
6431 code = PLUS;
6432 op2 = negate_rtx (GET_MODE (value), op2);
6435 /* Check for an addition with OP2 a constant integer and our first
6436 operand a PLUS of a virtual register and something else. In that
6437 case, we want to emit the sum of the virtual register and the
6438 constant first and then add the other value. This allows virtual
6439 register instantiation to simply modify the constant rather than
6440 creating another one around this addition. */
6441 if (code == PLUS && CONST_INT_P (op2)
6442 && GET_CODE (XEXP (value, 0)) == PLUS
6443 && REG_P (XEXP (XEXP (value, 0), 0))
6444 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6445 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6447 rtx temp = expand_simple_binop (GET_MODE (value), code,
6448 XEXP (XEXP (value, 0), 0), op2,
6449 subtarget, 0, OPTAB_LIB_WIDEN);
6450 return expand_simple_binop (GET_MODE (value), code, temp,
6451 force_operand (XEXP (XEXP (value,
6452 0), 1), NULL_RTX),
6453 target, 0, OPTAB_LIB_WIDEN);
6456 op1 = force_operand (XEXP (value, 0), subtarget);
6457 op2 = force_operand (op2, NULL_RTX);
6458 switch (code)
6460 case MULT:
6461 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6462 case DIV:
6463 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6464 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6465 target, 1, OPTAB_LIB_WIDEN);
6466 else
6467 return expand_divmod (0,
6468 FLOAT_MODE_P (GET_MODE (value))
6469 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6470 GET_MODE (value), op1, op2, target, 0);
6471 case MOD:
6472 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6473 target, 0);
6474 case UDIV:
6475 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6476 target, 1);
6477 case UMOD:
6478 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6479 target, 1);
6480 case ASHIFTRT:
6481 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6482 target, 0, OPTAB_LIB_WIDEN);
6483 default:
6484 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6485 target, 1, OPTAB_LIB_WIDEN);
6488 if (UNARY_P (value))
6490 if (!target)
6491 target = gen_reg_rtx (GET_MODE (value));
6492 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6493 switch (code)
6495 case ZERO_EXTEND:
6496 case SIGN_EXTEND:
6497 case TRUNCATE:
6498 case FLOAT_EXTEND:
6499 case FLOAT_TRUNCATE:
6500 convert_move (target, op1, code == ZERO_EXTEND);
6501 return target;
6503 case FIX:
6504 case UNSIGNED_FIX:
6505 expand_fix (target, op1, code == UNSIGNED_FIX);
6506 return target;
6508 case FLOAT:
6509 case UNSIGNED_FLOAT:
6510 expand_float (target, op1, code == UNSIGNED_FLOAT);
6511 return target;
6513 default:
6514 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6518 #ifdef INSN_SCHEDULING
6519 /* On machines that have insn scheduling, we want all memory references to be
6520 explicit, so we need to deal with such paradoxical SUBREGs. */
6521 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6522 && (GET_MODE_SIZE (GET_MODE (value))
6523 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6524 value
6525 = simplify_gen_subreg (GET_MODE (value),
6526 force_reg (GET_MODE (SUBREG_REG (value)),
6527 force_operand (SUBREG_REG (value),
6528 NULL_RTX)),
6529 GET_MODE (SUBREG_REG (value)),
6530 SUBREG_BYTE (value));
6531 #endif
6533 return value;
6536 /* Subroutine of expand_expr: return nonzero iff there is no way that
6537 EXP can reference X, which is being modified. TOP_P is nonzero if this
6538 call is going to be used to determine whether we need a temporary
6539 for EXP, as opposed to a recursive call to this function.
6541 It is always safe for this routine to return zero since it merely
6542 searches for optimization opportunities. */
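/* Illustrative note (added commentary, not in the original source): a
   typical use is deciding whether a suggested TARGET can double as a
   scratch while evaluating EXP; e.g. if X is pseudo 100 and EXP is
   "a + b" where neither operand lives in that pseudo, the walk below
   finds no conflict and returns 1.  */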
6545 safe_from_p (const_rtx x, tree exp, int top_p)
6547 rtx exp_rtl = 0;
6548 int i, nops;
6550 if (x == 0
6551 /* If EXP has varying size, we MUST use a target since we currently
6552 have no way of allocating temporaries of variable size
6553 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6554 So we assume here that something at a higher level has prevented a
6555 clash. This is somewhat bogus, but the best we can do. Only
6556 do this when X is BLKmode and when we are at the top level. */
6557 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6558 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6559 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6560 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6561 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6562 != INTEGER_CST)
6563 && GET_MODE (x) == BLKmode)
6564 /* If X is in the outgoing argument area, it is always safe. */
6565 || (MEM_P (x)
6566 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6567 || (GET_CODE (XEXP (x, 0)) == PLUS
6568 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6569 return 1;
6571 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6572 find the underlying pseudo. */
6573 if (GET_CODE (x) == SUBREG)
6575 x = SUBREG_REG (x);
6576 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6577 return 0;
6580 /* Now look at our tree code and possibly recurse. */
6581 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6583 case tcc_declaration:
6584 exp_rtl = DECL_RTL_IF_SET (exp);
6590 case tcc_exceptional:
6591 if (TREE_CODE (exp) == TREE_LIST)
6593 while (1)
6595 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6596 return 0;
6597 exp = TREE_CHAIN (exp);
6598 if (!exp)
6599 return 1;
6600 if (TREE_CODE (exp) != TREE_LIST)
6601 return safe_from_p (x, exp, 0);
6604 else if (TREE_CODE (exp) == CONSTRUCTOR)
6606 constructor_elt *ce;
6607 unsigned HOST_WIDE_INT idx;
6609 FOR_EACH_VEC_ELT (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce)
6610 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6611 || !safe_from_p (x, ce->value, 0))
6612 return 0;
6613 return 1;
6615 else if (TREE_CODE (exp) == ERROR_MARK)
6616 return 1; /* An already-visited SAVE_EXPR? */
6617 else
6618 return 0;
6620 case tcc_statement:
6621 /* The only case we look at here is the DECL_INITIAL inside a
6622 DECL_EXPR. */
6623 return (TREE_CODE (exp) != DECL_EXPR
6624 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6625 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6626 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6628 case tcc_binary:
6629 case tcc_comparison:
6630 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6631 return 0;
6633 /* Fall through. */
6634 case tcc_unary:
6635 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6637 case tcc_expression:
6638 case tcc_reference:
6639 case tcc_vl_exp:
6640 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6641 the expression. If it is set, we conflict iff we are that rtx or
6642 both are in memory. Otherwise, we check all operands of the
6643 expression recursively. */
6645 switch (TREE_CODE (exp))
6647 case ADDR_EXPR:
6648 /* If the operand is static or we are static, we can't conflict.
6649 Likewise if we don't conflict with the operand at all. */
6650 if (staticp (TREE_OPERAND (exp, 0))
6651 || TREE_STATIC (exp)
6652 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6653 return 1;
6655 /* Otherwise, the only way this can conflict is if we are taking
6656 the address of a DECL whose address is part of X, which is
6657 very rare. */
6658 exp = TREE_OPERAND (exp, 0);
6659 if (DECL_P (exp))
6661 if (!DECL_RTL_SET_P (exp)
6662 || !MEM_P (DECL_RTL (exp)))
6663 return 0;
6664 else
6665 exp_rtl = XEXP (DECL_RTL (exp), 0);
6667 break;
6670 if (MEM_P (x)
6671 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6672 get_alias_set (exp)))
6673 return 0;
6674 break;
6676 case CALL_EXPR:
6677 /* Assume that the call will clobber all hard registers and
6678 all of memory. */
6679 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6680 || MEM_P (x))
6681 return 0;
6682 break;
6684 case WITH_CLEANUP_EXPR:
6685 case CLEANUP_POINT_EXPR:
6686 /* Lowered by gimplify.c. */
6687 gcc_unreachable ();
6689 case SAVE_EXPR:
6690 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6696 /* If we have an rtx, we do not need to scan our operands. */
6697 if (exp_rtl)
6698 break;
6700 nops = TREE_OPERAND_LENGTH (exp);
6701 for (i = 0; i < nops; i++)
6702 if (TREE_OPERAND (exp, i) != 0
6703 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6704 return 0;
6706 break;
6708 case tcc_type:
6709 /* Should never get a type here. */
6710 gcc_unreachable ();
6713 /* If we have an rtl, find any enclosed object. Then see if we conflict
6714 with it. */
6715 if (exp_rtl)
6717 if (GET_CODE (exp_rtl) == SUBREG)
6719 exp_rtl = SUBREG_REG (exp_rtl);
6720 if (REG_P (exp_rtl)
6721 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6722 return 0;
6725 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6726 are memory and they conflict. */
6727 return ! (rtx_equal_p (x, exp_rtl)
6728 || (MEM_P (x) && MEM_P (exp_rtl)
6729 && true_dependence (exp_rtl, VOIDmode, x,
6730 rtx_addr_varies_p)));
6733 /* If we reach here, it is safe. */
6734 return 1;
6738 /* Return the highest power of two that EXP is known to be a multiple of.
6739 This is used in updating alignment of MEMs in array references. */
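/* Worked example (added commentary, not in the original source): for
   EXP = x * 12 + 8 with nothing known about x, the default case yields 1
   for x, the MULT case gives 1 * 4 = 4 (the largest power of two dividing
   12), the constant 8 contributes 8, and the PLUS case returns
   MIN (4, 8) = 4.  */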
6741 unsigned HOST_WIDE_INT
6742 highest_pow2_factor (const_tree exp)
6744 unsigned HOST_WIDE_INT c0, c1;
6746 switch (TREE_CODE (exp))
6748 case INTEGER_CST:
6749 /* We can find the lowest bit that's a one. If the low
6750 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6751 We need to handle this case since we can find it in a COND_EXPR,
6752 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6753 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6754 later ICE. */
6755 if (TREE_OVERFLOW (exp))
6756 return BIGGEST_ALIGNMENT;
6759 /* Note: tree_low_cst is intentionally not used here,
6760 we don't care about the upper bits. */
6761 c0 = TREE_INT_CST_LOW (exp);
6762 c0 &= -c0;
6763 return c0 ? c0 : BIGGEST_ALIGNMENT;
6767 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6768 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6769 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6770 return MIN (c0, c1);
6772 case MULT_EXPR:
6773 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6774 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6775 return c0 * c1;
6777 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6778 case CEIL_DIV_EXPR:
6779 if (integer_pow2p (TREE_OPERAND (exp, 1))
6780 && host_integerp (TREE_OPERAND (exp, 1), 1))
6782 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6783 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6784 return MAX (1, c0 / c1);
6788 case BIT_AND_EXPR:
6789 /* The highest power of two of a bit-and expression is the maximum of
6790 that of its operands. We typically get here for a complex LHS and
6791 a constant negative power of two on the RHS to force an explicit
6792 alignment, so don't bother looking at the LHS. */
6793 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6795 CASE_CONVERT:
6796 case SAVE_EXPR:
6797 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6799 case COMPOUND_EXPR:
6800 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6802 case COND_EXPR:
6803 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6804 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6805 return MIN (c0, c1);
6807 default:
6808 break;
6811 return 1;
6814 /* Similar, except that the alignment requirements of TARGET are
6815 taken into account. Assume it is at least as aligned as its
6816 type, unless it is a COMPONENT_REF in which case the layout of
6817 the structure gives the alignment. */
6819 static unsigned HOST_WIDE_INT
6820 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6822 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6823 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6825 return MAX (factor, talign);
6828 /* Subroutine of expand_expr. Expand the two operands of a binary
6829 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6830 The value may be stored in TARGET if TARGET is nonzero. The
6831 MODIFIER argument is as documented by expand_expr. */
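/* Usage sketch (added commentary, not in the original source): the
   ordinary binop path at the end of expand_expr_real_2 is the typical
   caller:

     expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
                      EXPAND_NORMAL);
     temp = expand_binop (mode, this_optab, op0, op1, target,
                          unsignedp, OPTAB_LIB_WIDEN);

   Note how TARGET is dropped below whenever it is not safe_from_p with
   respect to EXP1.  */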
6834 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6835 enum expand_modifier modifier)
6837 if (! safe_from_p (target, exp1, 1))
6838 target = 0;
6839 if (operand_equal_p (exp0, exp1, 0))
6841 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6842 *op1 = copy_rtx (*op0);
6843 return;
6846 /* If we need to preserve evaluation order, copy exp0 into its own
6847 temporary variable so that it can't be clobbered by exp1. */
6848 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6849 exp0 = save_expr (exp0);
6850 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6851 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6856 /* Return a MEM that contains constant EXP. DEFER is as for
6857 output_constant_def and MODIFIER is as for expand_expr. */
6860 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6862 rtx mem;
6864 mem = output_constant_def (exp, defer);
6865 if (modifier != EXPAND_INITIALIZER)
6866 mem = use_anchored_address (mem);
6867 return mem;
6870 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6871 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6874 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6875 enum expand_modifier modifier, addr_space_t as)
6877 rtx result, subtarget;
6878 tree inner, offset;
6879 HOST_WIDE_INT bitsize, bitpos;
6880 int volatilep, unsignedp;
6881 enum machine_mode mode1;
6883 /* If we are taking the address of a constant and are at the top level,
6884 we have to use output_constant_def since we can't call force_const_mem
6885 yet. */
6886 /* ??? This should be considered a front-end bug. We should not be
6887 generating ADDR_EXPR of something that isn't an LVALUE. The only
6888 exception here is STRING_CST. */
6889 if (CONSTANT_CLASS_P (exp))
6890 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6892 /* Everything must be something allowed by is_gimple_addressable. */
6893 switch (TREE_CODE (exp))
6895 case INDIRECT_REF:
6896 /* This case will happen via recursion for &a->b. */
6897 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6899 case MEM_REF:
6901 tree tem = TREE_OPERAND (exp, 0);
6902 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6903 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6904 tem,
6905 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6906 return expand_expr (tem, target, tmode, modifier);
6909 case CONST_DECL:
6910 /* Expand the initializer like constants above. */
6911 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6913 case REALPART_EXPR:
6914 /* The real part of the complex number is always first, therefore
6915 the address is the same as the address of the parent object. */
6916 offset = 0;
6917 bitpos = 0;
6918 inner = TREE_OPERAND (exp, 0);
6919 break;
6921 case IMAGPART_EXPR:
6922 /* The imaginary part of the complex number is always second.
6923 The expression is therefore always offset by the size of the
6924 scalar type. */
6925 offset = 0;
6926 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6927 inner = TREE_OPERAND (exp, 0);
6928 break;
6930 default:
6931 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6932 expand_expr, as that can have various side effects; LABEL_DECLs for
6933 example, may not have their DECL_RTL set yet. Expand the rtl of
6934 CONSTRUCTORs too, which should yield a memory reference for the
6935 constructor's contents. Assume language specific tree nodes can
6936 be expanded in some interesting way. */
6937 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6938 if (DECL_P (exp)
6939 || TREE_CODE (exp) == CONSTRUCTOR
6940 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6942 result = expand_expr (exp, target, tmode,
6943 modifier == EXPAND_INITIALIZER
6944 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6946 /* If the DECL isn't in memory, then the DECL wasn't properly
6947 marked TREE_ADDRESSABLE, which will be either a front-end
6948 or a tree optimizer bug. */
6949 gcc_assert (MEM_P (result));
6950 result = XEXP (result, 0);
6952 /* ??? Is this needed anymore? */
6953 if (DECL_P (exp) && !TREE_USED (exp))
6955 assemble_external (exp);
6956 TREE_USED (exp) = 1;
6959 if (modifier != EXPAND_INITIALIZER
6960 && modifier != EXPAND_CONST_ADDRESS)
6961 result = force_operand (result, target);
6962 return result;
6965 /* Pass FALSE as the last argument to get_inner_reference although
6966 we are expanding to RTL. The rationale is that we know how to
6967 handle "aligning nodes" here: we can just bypass them because
6968 they won't change the final object whose address will be returned
6969 (they actually exist only for that purpose). */
6970 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6971 &mode1, &unsignedp, &volatilep, false);
6975 /* We must have made progress. */
6976 gcc_assert (inner != exp);
6978 subtarget = offset || bitpos ? NULL_RTX : target;
6979 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6980 inner alignment, force the inner to be sufficiently aligned. */
6981 if (CONSTANT_CLASS_P (inner)
6982 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6984 inner = copy_node (inner);
6985 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6986 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6987 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6989 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6991 if (offset)
6993 rtx tmp;
6995 if (modifier != EXPAND_NORMAL)
6996 result = force_operand (result, NULL);
6997 tmp = expand_expr (offset, NULL_RTX, tmode,
6998 modifier == EXPAND_INITIALIZER
6999 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7001 result = convert_memory_address_addr_space (tmode, result, as);
7002 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7004 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7005 result = simplify_gen_binary (PLUS, tmode, result, tmp);
7006 else
7008 subtarget = bitpos ? NULL_RTX : target;
7009 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7010 1, OPTAB_LIB_WIDEN);
7013 if (bitpos)
7016 /* Someone beforehand should have rejected taking the address
7017 of such an object. */
7018 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7020 result = plus_constant (result, bitpos / BITS_PER_UNIT);
7021 if (modifier < EXPAND_SUM)
7022 result = force_operand (result, target);
7024 return result;
7028 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7029 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7032 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7033 enum expand_modifier modifier)
7035 addr_space_t as = ADDR_SPACE_GENERIC;
7036 enum machine_mode address_mode = Pmode;
7037 enum machine_mode pointer_mode = ptr_mode;
7038 enum machine_mode rmode;
7039 rtx result;
7041 /* Target mode of VOIDmode says "whatever's natural". */
7042 if (tmode == VOIDmode)
7043 tmode = TYPE_MODE (TREE_TYPE (exp));
7045 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7047 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7048 address_mode = targetm.addr_space.address_mode (as);
7049 pointer_mode = targetm.addr_space.pointer_mode (as);
7052 /* We can get called with some Weird Things if the user does silliness
7053 like "(short) &a". In that case, convert_memory_address won't do
7054 the right thing, so ignore the given target mode. */
7055 if (tmode != address_mode && tmode != pointer_mode)
7056 tmode = address_mode;
7058 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7059 tmode, modifier, as);
7061 /* Despite expand_expr claims concerning ignoring TMODE when not
7062 strictly convenient, stuff breaks if we don't honor it. Note
7063 that combined with the above, we only do this for pointer modes. */
7064 rmode = GET_MODE (result);
7065 if (rmode == VOIDmode)
7066 rmode = tmode;
7067 if (rmode != tmode)
7068 result = convert_memory_address_addr_space (tmode, result, as);
7070 return result;
7073 /* Generate code for computing CONSTRUCTOR EXP.
7074 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7075 is TRUE, instead of creating a temporary variable in memory
7076 NULL is returned and the caller needs to handle it differently. */
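/* Illustrative note (added commentary, not in the original source): for a
   BLKmode aggregate whose initializer is all zeros, e.g.
   "struct S s = { 0 };", the first test below skips the temporary
   entirely and emits a single clear_storage of the target block.  */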
7079 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7080 bool avoid_temp_mem)
7082 tree type = TREE_TYPE (exp);
7083 enum machine_mode mode = TYPE_MODE (type);
7085 /* Try to avoid creating a temporary at all. This is possible
7086 if all of the initializer is zero.
7087 FIXME: try to handle all [0..255] initializers we can handle
7088 with memset. */
7089 if (TREE_STATIC (exp)
7090 && !TREE_ADDRESSABLE (exp)
7091 && target != 0 && mode == BLKmode
7092 && all_zeros_p (exp))
7094 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7095 return target;
7098 /* All elts simple constants => refer to a constant in memory. But
7099 if this is a non-BLKmode mode, let it store a field at a time
7100 since that should make a CONST_INT or CONST_DOUBLE when we
7101 fold. Likewise, if we have a target we can use, it is best to
7102 store directly into the target unless the type is large enough
7103 that memcpy will be used. If we are making an initializer and
7104 all operands are constant, put it in memory as well.
7106 FIXME: Avoid trying to fill vector constructors piece-meal.
7107 Output them with output_constant_def below unless we're sure
7108 they're zeros. This should go away when vector initializers
7109 are treated like VECTOR_CST instead of arrays. */
7110 if ((TREE_STATIC (exp)
7111 && ((mode == BLKmode
7112 && ! (target != 0 && safe_from_p (target, exp, 1)))
7113 || TREE_ADDRESSABLE (exp)
7114 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7115 && (! MOVE_BY_PIECES_P
7116 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7117 TYPE_ALIGN (type)))
7118 && ! mostly_zeros_p (exp))))
7119 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7120 && TREE_CONSTANT (exp)))
7122 rtx constructor;
7124 if (avoid_temp_mem)
7125 return NULL_RTX;
7127 constructor = expand_expr_constant (exp, 1, modifier);
7129 if (modifier != EXPAND_CONST_ADDRESS
7130 && modifier != EXPAND_INITIALIZER
7131 && modifier != EXPAND_SUM)
7132 constructor = validize_mem (constructor);
7134 return constructor;
7137 /* Handle calls that pass values in multiple non-contiguous
7138 locations. The Irix 6 ABI has examples of this. */
7139 if (target == 0 || ! safe_from_p (target, exp, 1)
7140 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7142 if (avoid_temp_mem)
7143 return NULL_RTX;
7145 target
7146 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7147 | (TREE_READONLY (exp)
7148 * TYPE_QUAL_CONST))),
7149 0, TREE_ADDRESSABLE (exp), 1);
7152 store_constructor (exp, target, 0, int_expr_size (exp));
7153 return target;
7157 /* expand_expr: generate code for computing expression EXP.
7158 An rtx for the computed value is returned. The value is never null.
7159 In the case of a void EXP, const0_rtx is returned.
7161 The value may be stored in TARGET if TARGET is nonzero.
7162 TARGET is just a suggestion; callers must assume that
7163 the rtx returned may not be the same as TARGET.
7165 If TARGET is CONST0_RTX, it means that the value will be ignored.
7167 If TMODE is not VOIDmode, it suggests generating the
7168 result in mode TMODE. But this is done only when convenient.
7169 Otherwise, TMODE is ignored and the value generated in its natural mode.
7170 TMODE is just a suggestion; callers must assume that
7171 the rtx returned may not have mode TMODE.
7173 Note that TARGET may have neither TMODE nor MODE. In that case, it
7174 probably will not be used.
7176 If MODIFIER is EXPAND_SUM then when EXP is an addition
7177 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7178 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7179 products as above, or REG or MEM, or constant.
7180 Ordinarily in such cases we would output mul or add instructions
7181 and then return a pseudo reg containing the sum.
7183 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7184 it also marks a label as absolutely required (it can't be dead).
7185 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7186 This is used for outputting expressions used in initializers.
7188 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7189 with a constant address even if that address is not normally legitimate.
7190 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7192 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7193 a call parameter. Such targets require special care as we haven't yet
7194 marked TARGET so that it's safe from being trashed by libcalls. We
7195 don't want to use TARGET for anything but the final result;
7196 Intermediate values must go elsewhere. Additionally, calls to
7197 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7199 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7200 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7201 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7202 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7203 recursively. */
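/* Illustrative note (added commentary, not in the original source): under
   EXPAND_SUM, expanding something like &arr[i] may legitimately return
   (plus:SI (mult:SI (reg:SI 100) (const_int 4)) (symbol_ref:SI "arr"))
   so the caller can fold the whole sum into an addressing mode rather
   than committing to separate mul/add insns.  */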
7206 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7207 enum expand_modifier modifier, rtx *alt_rtl)
7209 rtx ret;
7211 /* Handle ERROR_MARK before anybody tries to access its type. */
7212 if (TREE_CODE (exp) == ERROR_MARK
7213 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7215 ret = CONST0_RTX (tmode);
7216 return ret ? ret : const0_rtx;
7219 /* If this is an expression of some kind and it has an associated line
7220 number, then emit the line number before expanding the expression.
7222 We need to save and restore the file and line information so that
7223 errors discovered during expansion are emitted with the right
7224 information. It would be better if the diagnostic routines
7225 used the file/line information embedded in the tree nodes rather
7226 than globals. */
7227 if (cfun && EXPR_HAS_LOCATION (exp))
7229 location_t saved_location = input_location;
7230 location_t saved_curr_loc = get_curr_insn_source_location ();
7231 tree saved_block = get_curr_insn_block ();
7232 input_location = EXPR_LOCATION (exp);
7233 set_curr_insn_source_location (input_location);
7235 /* Record where the insns produced belong. */
7236 set_curr_insn_block (TREE_BLOCK (exp));
7238 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7240 input_location = saved_location;
7241 set_curr_insn_block (saved_block);
7242 set_curr_insn_source_location (saved_curr_loc);
7244 return ret;
7246 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7248 return ret;
7253 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7254 enum expand_modifier modifier)
7256 rtx op0, op1, op2, temp;
7257 tree type;
7258 int unsignedp;
7259 enum machine_mode mode;
7260 enum tree_code code = ops->code;
7261 optab this_optab;
7262 rtx subtarget, original_target;
7263 int ignore;
7264 bool reduce_bit_field;
7265 location_t loc = ops->location;
7266 tree treeop0, treeop1, treeop2;
7267 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7268 ? reduce_to_bit_field_precision ((expr), \
7269 target, \
7270 type) \
7271 : (expr))
7273 type = ops->type;
7274 mode = TYPE_MODE (type);
7275 unsignedp = TYPE_UNSIGNED (type);
7281 /* We should be called only on simple (binary or unary) expressions,
7282 exactly those that are valid in gimple expressions that aren't
7283 GIMPLE_SINGLE_RHS (or invalid). */
7284 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7285 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7286 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7288 ignore = (target == const0_rtx
7289 || ((CONVERT_EXPR_CODE_P (code)
7290 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7291 && TREE_CODE (type) == VOID_TYPE));
7293 /* We should be called only if we need the result. */
7294 gcc_assert (!ignore);
7296 /* An operation in what may be a bit-field type needs the
7297 result to be reduced to the precision of the bit-field type,
7298 which is narrower than that of the type's mode. */
7299 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7300 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7302 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7303 target = 0;
7305 /* Use subtarget as the target for operand 0 of a binary operation. */
7306 subtarget = get_subtarget (target);
7307 original_target = target;
7309 switch (code)
7311 case NON_LVALUE_EXPR:
7312 case PAREN_EXPR:
7313 CASE_CONVERT:
7314 if (treeop0 == error_mark_node)
7315 return const0_rtx;
7317 if (TREE_CODE (type) == UNION_TYPE)
7319 tree valtype = TREE_TYPE (treeop0);
7321 /* If both input and output are BLKmode, this conversion isn't doing
7322 anything except possibly changing memory attribute. */
7323 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7325 rtx result = expand_expr (treeop0, target, tmode,
7326 modifier);
7328 result = copy_rtx (result);
7329 set_mem_attributes (result, type, 0);
7330 return result;
7333 if (target == 0)
7335 if (TYPE_MODE (type) != BLKmode)
7336 target = gen_reg_rtx (TYPE_MODE (type));
7337 else
7338 target = assign_temp (type, 0, 1, 1);
7341 if (MEM_P (target))
7342 /* Store data into beginning of memory target. */
7343 store_expr (treeop0,
7344 adjust_address (target, TYPE_MODE (valtype), 0),
7345 modifier == EXPAND_STACK_PARM,
7346 false);
7348 else
7350 gcc_assert (REG_P (target));
7352 /* Store this field into a union of the proper type. */
7353 store_field (target,
7354 MIN ((int_size_in_bytes (TREE_TYPE
7355 (treeop0))
7356 * BITS_PER_UNIT),
7357 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7358 0, TYPE_MODE (valtype), treeop0,
7359 type, 0, false);
7362 /* Return the entire union. */
7363 return target;
7366 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7368 op0 = expand_expr (treeop0, target, VOIDmode,
7369 modifier);
7371 /* If the signedness of the conversion differs and OP0 is
7372 a promoted SUBREG, clear that indication since we now
7373 have to do the proper extension. */
7374 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7375 && GET_CODE (op0) == SUBREG)
7376 SUBREG_PROMOTED_VAR_P (op0) = 0;
7378 return REDUCE_BIT_FIELD (op0);
7381 op0 = expand_expr (treeop0, NULL_RTX, mode,
7382 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7383 if (GET_MODE (op0) == mode)
7384 return op0;
7386 /* If OP0 is a constant, just convert it into the proper mode. */
7387 else if (CONSTANT_P (op0))
7389 tree inner_type = TREE_TYPE (treeop0);
7390 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7392 if (modifier == EXPAND_INITIALIZER)
7393 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7394 subreg_lowpart_offset (mode,
7395 inner_mode));
7396 else
7397 op0 = convert_modes (mode, inner_mode, op0,
7398 TYPE_UNSIGNED (inner_type));
7401 else if (modifier == EXPAND_INITIALIZER)
7402 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7404 else if (target == 0)
7405 op0 = convert_to_mode (mode, op0,
7406 TYPE_UNSIGNED (TREE_TYPE
7407 (treeop0)));
7408 else
7410 convert_move (target, op0,
7411 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7412 op0 = target;
7415 return REDUCE_BIT_FIELD (op0);
7417 case ADDR_SPACE_CONVERT_EXPR:
7419 tree treeop0_type = TREE_TYPE (treeop0);
7420 addr_space_t as_to;
7421 addr_space_t as_from;
7423 gcc_assert (POINTER_TYPE_P (type));
7424 gcc_assert (POINTER_TYPE_P (treeop0_type));
7426 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7427 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7429 /* Conversions between pointers to the same address space should
7430 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7431 gcc_assert (as_to != as_from);
7433 /* Ask target code to handle conversion between pointers
7434 to overlapping address spaces. */
7435 if (targetm.addr_space.subset_p (as_to, as_from)
7436 || targetm.addr_space.subset_p (as_from, as_to))
7438 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7439 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7440 return op0;
7444 /* For disjoint address spaces, converting anything but
7445 a null pointer invokes undefined behaviour. We simply
7446 always return a null pointer here. */
7447 return CONST0_RTX (mode);
7450 case POINTER_PLUS_EXPR:
7451 /* Even though the sizetype mode and the pointer's mode can be different,
7452 expand is able to handle this correctly and get the correct result out
7453 of the PLUS_EXPR code. */
7454 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7455 if sizetype precision is smaller than pointer precision. */
7456 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7457 treeop1 = fold_convert_loc (loc, type,
7458 fold_convert_loc (loc, ssizetype,
7459 treeop1));
7460 case PLUS_EXPR:
7461 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7462 something else, make sure we add the register to the constant and
7463 then to the other thing. This case can occur during strength
7464 reduction and doing it this way will produce better code if the
7465 frame pointer or argument pointer is eliminated.
7467 fold-const.c will ensure that the constant is always in the inner
7468 PLUS_EXPR, so the only case we need to do anything about is if
7469 sp, ap, or fp is our second argument, in which case we must swap
7470 the innermost first argument and our second argument. */
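/* Illustrative example (added commentary, not in the original source):
   given (T + 4) + FP, the swap below rearranges the operands so that
   FP + 4 is formed first and T is added afterwards, letting virtual
   register instantiation fold the 4 into FP's elimination offset
   instead of materializing a second constant.  */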
7472 if (TREE_CODE (treeop0) == PLUS_EXPR
7473 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7474 && TREE_CODE (treeop1) == VAR_DECL
7475 && (DECL_RTL (treeop1) == frame_pointer_rtx
7476 || DECL_RTL (treeop1) == stack_pointer_rtx
7477 || DECL_RTL (treeop1) == arg_pointer_rtx))
7479 tree t = treeop1;
7481 treeop1 = TREE_OPERAND (treeop0, 0);
7482 TREE_OPERAND (treeop0, 0) = t;
7485 /* If the result is to be ptr_mode and we are adding an integer to
7486 something, we might be forming a constant. So try to use
7487 plus_constant. If it produces a sum and we can't accept it,
7488 use force_operand. This allows P = &ARR[const] to generate
7489 efficient code on machines where a SYMBOL_REF is not a valid
7492 If this is an EXPAND_SUM call, always return the sum. */
7493 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7494 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7496 if (modifier == EXPAND_STACK_PARM)
7497 target = 0;
7498 if (TREE_CODE (treeop0) == INTEGER_CST
7499 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7500 && TREE_CONSTANT (treeop1))
7502 rtx constant_part;
7504 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7505 EXPAND_SUM);
7506 /* Use immed_double_const to ensure that the constant is
7507 truncated according to the mode of OP1, then sign extended
7508 to a HOST_WIDE_INT. Using the constant directly can result
7509 in non-canonical RTL in a 64x32 cross compile. */
7510 constant_part
7511 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7512 (HOST_WIDE_INT) 0,
7513 TYPE_MODE (TREE_TYPE (treeop1)));
7514 op1 = plus_constant (op1, INTVAL (constant_part));
7515 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7516 op1 = force_operand (op1, target);
7517 return REDUCE_BIT_FIELD (op1);
7520 else if (TREE_CODE (treeop1) == INTEGER_CST
7521 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7522 && TREE_CONSTANT (treeop0))
7524 rtx constant_part;
7526 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7527 (modifier == EXPAND_INITIALIZER
7528 ? EXPAND_INITIALIZER : EXPAND_SUM));
7529 if (! CONSTANT_P (op0))
7531 op1 = expand_expr (treeop1, NULL_RTX,
7532 VOIDmode, modifier);
7533 /* Return a PLUS if modifier says it's OK. */
7534 if (modifier == EXPAND_SUM
7535 || modifier == EXPAND_INITIALIZER)
7536 return simplify_gen_binary (PLUS, mode, op0, op1);
7539 /* Use immed_double_const to ensure that the constant is
7540 truncated according to the mode of OP1, then sign extended
7541 to a HOST_WIDE_INT. Using the constant directly can result
7542 in non-canonical RTL in a 64x32 cross compile. */
7543 constant_part
7544 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7545 (HOST_WIDE_INT) 0,
7546 TYPE_MODE (TREE_TYPE (treeop0)));
7547 op0 = plus_constant (op0, INTVAL (constant_part));
7548 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7549 op0 = force_operand (op0, target);
7550 return REDUCE_BIT_FIELD (op0);
7554 /* Use TER to expand pointer addition of a negated value
7555 as pointer subtraction. */
7556 if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
7557 || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
7558 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
7559 && TREE_CODE (treeop1) == SSA_NAME
7560 && TYPE_MODE (TREE_TYPE (treeop0))
7561 == TYPE_MODE (TREE_TYPE (treeop1)))
7563 gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
7564 if (def)
7566 treeop1 = gimple_assign_rhs1 (def);
7567 code = MINUS_EXPR;
7568 goto do_minus;
7572 /* No sense saving up arithmetic to be done
7573 if it's all in the wrong mode to form part of an address.
7574 And force_operand won't know whether to sign-extend or
7575 zero-extend. */
7576 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7577 || mode != ptr_mode)
7579 expand_operands (treeop0, treeop1,
7580 subtarget, &op0, &op1, EXPAND_NORMAL);
7581 if (op0 == const0_rtx)
7582 return op1;
7583 if (op1 == const0_rtx)
7584 return op0;
7585 goto binop2;
7588 expand_operands (treeop0, treeop1,
7589 subtarget, &op0, &op1, modifier);
7590 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7592 case MINUS_EXPR:
7593 do_minus:
7594 /* For initializers, we are allowed to return a MINUS of two
7595 symbolic constants. Here we handle all cases when both operands
7596 are constant. */
7597 /* Handle difference of two symbolic constants,
7598 for the sake of an initializer. */
7599 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7600 && really_constant_p (treeop0)
7601 && really_constant_p (treeop1))
7603 expand_operands (treeop0, treeop1,
7604 NULL_RTX, &op0, &op1, modifier);
7606 /* If the last operand is a CONST_INT, use plus_constant of
7607 the negated constant. Else make the MINUS. */
7608 if (CONST_INT_P (op1))
7609 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7611 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7614 /* No sense saving up arithmetic to be done
7615 if it's all in the wrong mode to form part of an address.
7616 And force_operand won't know whether to sign-extend or
7617 zero-extend. */
7618 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7619 || mode != ptr_mode)
7620 goto binop;
7622 expand_operands (treeop0, treeop1,
7623 subtarget, &op0, &op1, modifier);
7625 /* Convert A - const to A + (-const). */
7626 if (CONST_INT_P (op1))
7628 op1 = negate_rtx (mode, op1);
7629 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7634 case WIDEN_MULT_PLUS_EXPR:
7635 case WIDEN_MULT_MINUS_EXPR:
7636 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7637 op2 = expand_normal (treeop2);
7638 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7639 target, unsignedp);
7640 return target;
7642 case WIDEN_MULT_EXPR:
7643 /* If first operand is constant, swap them.
7644 Thus the following special case checks need only
7645 check the second operand. */
7646 if (TREE_CODE (treeop0) == INTEGER_CST)
7648 tree t1 = treeop0;
7649 treeop0 = treeop1;
7650 treeop1 = t1;
7653 /* First, check if we have a multiplication of one signed and one
7654 unsigned operand. */
7655 if (TREE_CODE (treeop1) != INTEGER_CST
7656 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7657 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7659 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7660 this_optab = usmul_widen_optab;
7661 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7663 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7665 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7666 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7667 EXPAND_NORMAL);
7668 else
7669 expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
7670 EXPAND_NORMAL);
7671 goto binop3;
7675 /* Check for a multiplication with matching signedness. */
7676 else if ((TREE_CODE (treeop1) == INTEGER_CST
7677 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7678 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7679 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7681 tree op0type = TREE_TYPE (treeop0);
7682 enum machine_mode innermode = TYPE_MODE (op0type);
7683 bool zextend_p = TYPE_UNSIGNED (op0type);
7684 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7685 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7687 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7688 && TREE_CODE (treeop0) != INTEGER_CST)
7690 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7692 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7693 EXPAND_NORMAL);
7694 temp = expand_widening_mult (mode, op0, op1, target,
7695 unsignedp, this_optab);
7696 return REDUCE_BIT_FIELD (temp);
7698 if (optab_handler (other_optab, mode) != CODE_FOR_nothing
7699 && innermode == word_mode)
7701 rtx htem, hipart;
7702 op0 = expand_normal (treeop0);
7703 if (TREE_CODE (treeop1) == INTEGER_CST)
7704 op1 = convert_modes (innermode, mode,
7705 expand_normal (treeop1), unsignedp);
7706 else
7707 op1 = expand_normal (treeop1);
7708 temp = expand_binop (mode, other_optab, op0, op1, target,
7709 unsignedp, OPTAB_LIB_WIDEN);
7710 hipart = gen_highpart (innermode, temp);
7711 htem = expand_mult_highpart_adjust (innermode, hipart,
7712 op0, op1, hipart,
7713 zextend_p);
7714 if (htem != hipart)
7715 emit_move_insn (hipart, htem);
7716 return REDUCE_BIT_FIELD (temp);
7720 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7721 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7722 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7723 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7725 case FMA_EXPR:
7727 optab opt = fma_optab;
7728 gimple def0, def2;
7730 /* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
7731 call. */
7732 if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
7734 tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
7735 tree call_expr;
7737 gcc_assert (fn != NULL_TREE);
7738 call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
7739 return expand_builtin (call_expr, target, subtarget, mode, false);
7742 def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
7743 def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
7745 op0 = op2 = NULL;
7747 if (def0 && def2
7748 && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
7750 opt = fnms_optab;
7751 op0 = expand_normal (gimple_assign_rhs1 (def0));
7752 op2 = expand_normal (gimple_assign_rhs1 (def2));
7754 else if (def0
7755 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
7757 opt = fnma_optab;
7758 op0 = expand_normal (gimple_assign_rhs1 (def0));
7760 else if (def2
7761 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
7763 opt = fms_optab;
7764 op2 = expand_normal (gimple_assign_rhs1 (def2));
7767 if (op0 == NULL)
7768 op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
7769 if (op2 == NULL)
7770 op2 = expand_normal (treeop2);
7771 op1 = expand_normal (treeop1);
7773 return expand_ternary_op (TYPE_MODE (type), opt,
7774 op0, op1, op2, target, 0);
7777 case MULT_EXPR:
7778 /* If this is a fixed-point operation, then we cannot use the code
7779 below because "expand_mult" doesn't support sat/no-sat fixed-point
7780 multiplications. */
7781 if (ALL_FIXED_POINT_MODE_P (mode))
7782 goto binop;
7784 /* If first operand is constant, swap them.
7785 Thus the following special case checks need only
7786 check the second operand. */
7787 if (TREE_CODE (treeop0) == INTEGER_CST)
7789 tree t1 = treeop0;
7790 treeop0 = treeop1;
7791 treeop1 = t1;
7794 /* Attempt to return something suitable for generating an
7795 indexed address, for machines that support that. */
7797 if (modifier == EXPAND_SUM && mode == ptr_mode
7798 && host_integerp (treeop1, 0))
7800 tree exp1 = treeop1;
7802 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7803 EXPAND_SUM);
7805 if (!REG_P (op0))
7806 op0 = force_operand (op0, NULL_RTX);
7807 if (!REG_P (op0))
7808 op0 = copy_to_mode_reg (mode, op0);
7810 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7811 gen_int_mode (tree_low_cst (exp1, 0),
7812 TYPE_MODE (TREE_TYPE (exp1)))));
7815 if (modifier == EXPAND_STACK_PARM)
7816 target = 0;
7818 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7819 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7821 case TRUNC_DIV_EXPR:
7822 case FLOOR_DIV_EXPR:
7823 case CEIL_DIV_EXPR:
7824 case ROUND_DIV_EXPR:
7825 case EXACT_DIV_EXPR:
7826 /* If this is a fixed-point operation, then we cannot use the code
7827 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7828 divisions. */
7829 if (ALL_FIXED_POINT_MODE_P (mode))
7830 goto binop;
7832 if (modifier == EXPAND_STACK_PARM)
7833 target = 0;
7834 /* Possible optimization: compute the dividend with EXPAND_SUM
7835 then if the divisor is constant can optimize the case
7836 where some terms of the dividend have coeffs divisible by it. */
7837 expand_operands (treeop0, treeop1,
7838 subtarget, &op0, &op1, EXPAND_NORMAL);
7839 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7844 case TRUNC_MOD_EXPR:
7845 case FLOOR_MOD_EXPR:
7846 case CEIL_MOD_EXPR:
7847 case ROUND_MOD_EXPR:
7848 if (modifier == EXPAND_STACK_PARM)
7849 target = 0;
7850 expand_operands (treeop0, treeop1,
7851 subtarget, &op0, &op1, EXPAND_NORMAL);
7852 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7854 case FIXED_CONVERT_EXPR:
7855 op0 = expand_normal (treeop0);
7856 if (target == 0 || modifier == EXPAND_STACK_PARM)
7857 target = gen_reg_rtx (mode);
7859 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7860 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7861 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7862 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7864 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7867 case FIX_TRUNC_EXPR:
7868 op0 = expand_normal (treeop0);
7869 if (target == 0 || modifier == EXPAND_STACK_PARM)
7870 target = gen_reg_rtx (mode);
7871 expand_fix (target, op0, unsignedp);
7872 return target;
7874 case FLOAT_EXPR:
7875 op0 = expand_normal (treeop0);
7876 if (target == 0 || modifier == EXPAND_STACK_PARM)
7877 target = gen_reg_rtx (mode);
7878 /* expand_float can't figure out what to do if FROM has VOIDmode.
7879 So give it the correct mode. With -O, cse will optimize this. */
7880 if (GET_MODE (op0) == VOIDmode)
7881 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7882 op0);
7883 expand_float (target, op0,
7884 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7885 return target;
7887 case NEGATE_EXPR:
7888 op0 = expand_expr (treeop0, subtarget,
7889 VOIDmode, EXPAND_NORMAL);
7890 if (modifier == EXPAND_STACK_PARM)
7891 target = 0;
7892 temp = expand_unop (mode,
7893 optab_for_tree_code (NEGATE_EXPR, type,
7894 optab_default),
7895 op0, target, 0);
7896 gcc_assert (temp);
7897 return REDUCE_BIT_FIELD (temp);
7899 case ABS_EXPR:
7900 op0 = expand_expr (treeop0, subtarget,
7901 VOIDmode, EXPAND_NORMAL);
7902 if (modifier == EXPAND_STACK_PARM)
7903 target = 0;
7905 /* ABS_EXPR is not valid for complex arguments. */
7906 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7907 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7909 /* Unsigned abs is simply the operand. Testing here means we don't
7910 risk generating incorrect code below. */
7911 if (TYPE_UNSIGNED (type))
7912 return op0;
7914 return expand_abs (mode, op0, target, unsignedp,
7915 safe_from_p (target, treeop0, 1));
7917 case MAX_EXPR:
7918 case MIN_EXPR:
7919 target = original_target;
7920 if (target == 0
7921 || modifier == EXPAND_STACK_PARM
7922 || (MEM_P (target) && MEM_VOLATILE_P (target))
7923 || GET_MODE (target) != mode
7924 || (REG_P (target)
7925 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7926 target = gen_reg_rtx (mode);
7927 expand_operands (treeop0, treeop1,
7928 target, &op0, &op1, EXPAND_NORMAL);
7930 /* First try to do it with a special MIN or MAX instruction.
7931 If that does not win, use a conditional jump to select the proper
7932 value. */
7933 this_optab = optab_for_tree_code (code, type, optab_default);
7934 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7935 OPTAB_WIDEN);
7936 if (temp != 0)
7937 return temp;
7939 /* At this point, a MEM target is no longer useful; we will get better
7940 code without it. */
7942 if (! REG_P (target))
7943 target = gen_reg_rtx (mode);
7945 /* If op1 was placed in target, swap op0 and op1. */
7946 if (target != op0 && target == op1)
7948 temp = op0;
7949 op0 = op1;
7950 op1 = temp;
7953 /* We generate better code and avoid problems with op1 mentioning
7954 target by forcing op1 into a pseudo if it isn't a constant. */
7955 if (! CONSTANT_P (op1))
7956 op1 = force_reg (mode, op1);
7959 enum rtx_code comparison_code;
7960 rtx cmpop1 = op1;
7962 if (code == MAX_EXPR)
7963 comparison_code = unsignedp ? GEU : GE;
7964 else
7965 comparison_code = unsignedp ? LEU : LE;
7967 /* Canonicalize to comparisons against 0. */
7968 if (op1 == const1_rtx)
7970 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7971 or (a != 0 ? a : 1) for unsigned.
7972 For MIN we are safe converting (a <= 1 ? a : 1)
7973 into (a <= 0 ? a : 1) */
7974 cmpop1 = const0_rtx;
7975 if (code == MAX_EXPR)
7976 comparison_code = unsignedp ? NE : GT;
7978 if (op1 == constm1_rtx && !unsignedp)
7980 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7981 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7982 cmpop1 = const0_rtx;
7983 if (code == MIN_EXPR)
7984 comparison_code = LT;
7986 #ifdef HAVE_conditional_move
7987 /* Use a conditional move if possible. */
7988 if (can_conditionally_move_p (mode))
7990 rtx insn;
7992 /* ??? Same problem as in expmed.c: emit_conditional_move
7993 forces a stack adjustment via compare_from_rtx, and we
7994 lose the stack adjustment if the sequence we are about
7995 to create is discarded. */
7996 do_pending_stack_adjust ();
7998 start_sequence ();
8000 /* Try to emit the conditional move. */
8001 insn = emit_conditional_move (target, comparison_code,
8002 op0, cmpop1, mode,
8003 op0, op1, mode,
8004 unsignedp);
8006 /* If we could do the conditional move, emit the sequence,
8007 and return. */
8008 if (insn != NULL_RTX)
8010 rtx seq = get_insns ();
8011 end_sequence ();
8012 emit_insn (seq);
8013 return target;
8016 /* Otherwise discard the sequence and fall back to code with
8017 branches. */
8018 end_sequence ();
8020 #endif
8021 if (target != op0)
8022 emit_move_insn (target, op0);
8024 temp = gen_label_rtx ();
8025 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8026 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8029 emit_move_insn (target, op1);
8030 emit_label (temp);
8031 return target;
8033 case BIT_NOT_EXPR:
8034 op0 = expand_expr (treeop0, subtarget,
8035 VOIDmode, EXPAND_NORMAL);
8036 if (modifier == EXPAND_STACK_PARM)
8037 target = 0;
8038 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8039 gcc_assert (temp);
8040 return temp;
8042 /* ??? Can optimize bitwise operations with one arg constant.
8043 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8044 and (a bitwise1 b) bitwise2 b (etc)
8045 but that is probably not worth while. */
8047 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8048 boolean values when we want in all cases to compute both of them. In
8049 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8050 as actual zero-or-1 values and then bitwise anding. In cases where
8051 there cannot be any side effects, better code would be made by
8052 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8053 how to recognize those cases. */
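/* Illustrative note (added commentary, not in the original source):
   because gimple operands here are already 0/1 values, e.g.
   "(a != 0) & (b != 0)", the TRUTH_* cases below can simply be rewritten
   as their BIT_* counterparts and share the generic binop path.  */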
8055 case TRUTH_AND_EXPR:
8056 code = BIT_AND_EXPR;
8057 goto binop;
8060 case TRUTH_OR_EXPR:
8061 code = BIT_IOR_EXPR;
8062 goto binop;
8065 case TRUTH_XOR_EXPR:
8066 code = BIT_XOR_EXPR;
8067 goto binop;
8069 case LROTATE_EXPR:
8070 case RROTATE_EXPR:
8072 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8073 || (GET_MODE_PRECISION (TYPE_MODE (type))
8074 == TYPE_PRECISION (type)));
8075 /* Fall through. */
8077 case LSHIFT_EXPR:
8078 case RSHIFT_EXPR:
8079 /* If this is a fixed-point operation, then we cannot use the code
8080 below because "expand_shift" doesn't support sat/no-sat fixed-point
8081 shifts. */
8082 if (ALL_FIXED_POINT_MODE_P (mode))
8083 goto binop;
8085 if (! safe_from_p (subtarget, treeop1, 1))
8086 subtarget = 0;
8087 if (modifier == EXPAND_STACK_PARM)
8088 target = 0;
8089 op0 = expand_expr (treeop0, subtarget,
8090 VOIDmode, EXPAND_NORMAL);
8091 temp = expand_shift (code, mode, op0, treeop1, target,
8092 unsignedp);
8093 if (code == LSHIFT_EXPR)
8094 temp = REDUCE_BIT_FIELD (temp);
8095 return temp;
8097 /* Could determine the answer when only additive constants differ. Also,
8098 the addition of one can be handled by changing the condition. */
8099 case LT_EXPR:
8100 case LE_EXPR:
8101 case GT_EXPR:
8102 case GE_EXPR:
8103 case EQ_EXPR:
8104 case NE_EXPR:
8105 case UNORDERED_EXPR:
8106 case ORDERED_EXPR:
8107 case UNLT_EXPR:
8108 case UNLE_EXPR:
8109 case UNGT_EXPR:
8110 case UNGE_EXPR:
8111 case UNEQ_EXPR:
8112 case LTGT_EXPR:
8113 temp = do_store_flag (ops,
8114 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8115 tmode != VOIDmode ? tmode : mode);
8116 if (temp)
8117 return temp;
8119 /* Use a compare and a jump for BLKmode comparisons, or for function
8120 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8122 if ((target == 0
8123 || modifier == EXPAND_STACK_PARM
8124 || ! safe_from_p (target, treeop0, 1)
8125 || ! safe_from_p (target, treeop1, 1)
8126 /* Make sure we don't have a hard reg (such as function's return
8127 value) live across basic blocks, if not optimizing. */
8128 || (!optimize && REG_P (target)
8129 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8130 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8132 emit_move_insn (target, const0_rtx);
8134 op1 = gen_label_rtx ();
8135 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8137 if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8138 emit_move_insn (target, constm1_rtx);
8139 else
8140 emit_move_insn (target, const1_rtx);
8141 emit_label (op1);
8142 return target;
8145 case TRUTH_NOT_EXPR:
8146 if (modifier == EXPAND_STACK_PARM)
8148 op0 = expand_expr (treeop0, target,
8149 VOIDmode, EXPAND_NORMAL);
8150 /* The parser is careful to generate TRUTH_NOT_EXPR
8151 only with operands that are always zero or one. */
8152 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8153 target, 1, OPTAB_LIB_WIDEN);
8154 gcc_assert (temp);
8155 return temp;
8157 case COMPLEX_EXPR:
8158 /* Get the rtx code of the operands. */
8159 op0 = expand_normal (treeop0);
8160 op1 = expand_normal (treeop1);
8162 if (!target)
8163 target = gen_reg_rtx (TYPE_MODE (type));
8165 /* Move the real (op0) and imaginary (op1) parts to their location. */
8166 write_complex_part (target, op0, false);
8167 write_complex_part (target, op1, true);
8169 return target;
8171 case WIDEN_SUM_EXPR:
8173 tree oprnd0 = treeop0;
8174 tree oprnd1 = treeop1;
8176 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8177 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8178 target, unsignedp);
8179 return target;
8182 case REDUC_MAX_EXPR:
8183 case REDUC_MIN_EXPR:
8184 case REDUC_PLUS_EXPR:
8186 op0 = expand_normal (treeop0);
8187 this_optab = optab_for_tree_code (code, type, optab_default);
8188 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8189 gcc_assert (temp);
8190 return temp;
8193 case VEC_EXTRACT_EVEN_EXPR:
8194 case VEC_EXTRACT_ODD_EXPR:
8196 expand_operands (treeop0, treeop1,
8197 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8198 this_optab = optab_for_tree_code (code, type, optab_default);
8199 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8200 OPTAB_WIDEN);
8201 gcc_assert (temp);
8202 return temp;
8205 case VEC_INTERLEAVE_HIGH_EXPR:
8206 case VEC_INTERLEAVE_LOW_EXPR:
8208 expand_operands (treeop0, treeop1,
8209 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8210 this_optab = optab_for_tree_code (code, type, optab_default);
8211 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8212 OPTAB_WIDEN);
8213 gcc_assert (temp);
8214 return temp;
8217 case VEC_LSHIFT_EXPR:
8218 case VEC_RSHIFT_EXPR:
8220 target = expand_vec_shift_expr (ops, target);
8221 return target;
8224 case VEC_UNPACK_HI_EXPR:
8225 case VEC_UNPACK_LO_EXPR:
8227 op0 = expand_normal (treeop0);
8228 this_optab = optab_for_tree_code (code, type, optab_default);
8229 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8230 target, unsignedp);
8231 gcc_assert (temp);
8232 return temp;
8235 case VEC_UNPACK_FLOAT_HI_EXPR:
8236 case VEC_UNPACK_FLOAT_LO_EXPR:
8238 op0 = expand_normal (treeop0);
8239 /* The signedness is determined from input operand. */
8240 this_optab = optab_for_tree_code (code,
8241 TREE_TYPE (treeop0),
8242 optab_default);
8243 temp = expand_widen_pattern_expr
8244 (ops, op0, NULL_RTX, NULL_RTX,
8245 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8246 gcc_assert (temp);
8247 return temp;
8251 case VEC_WIDEN_MULT_HI_EXPR:
8252 case VEC_WIDEN_MULT_LO_EXPR:
8254 tree oprnd0 = treeop0;
8255 tree oprnd1 = treeop1;
8257 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8258 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8259 target, unsignedp);
8260 gcc_assert (target);
8261 return target;
8264 case VEC_PACK_TRUNC_EXPR:
8265 case VEC_PACK_SAT_EXPR:
8266 case VEC_PACK_FIX_TRUNC_EXPR:
8267 mode = TYPE_MODE (TREE_TYPE (treeop0));
8268 goto binop;
8274 /* Here to do an ordinary binary operator. */
8276 expand_operands (treeop0, treeop1,
8277 subtarget, &op0, &op1, EXPAND_NORMAL);
8279 this_optab = optab_for_tree_code (code, type, optab_default);
8281 if (modifier == EXPAND_STACK_PARM)
8283 temp = expand_binop (mode, this_optab, op0, op1, target,
8284 unsignedp, OPTAB_LIB_WIDEN);
8286 return REDUCE_BIT_FIELD (temp);
8288 #undef REDUCE_BIT_FIELD
8291 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8292 enum expand_modifier modifier, rtx *alt_rtl)
8294 rtx op0, op1, temp, decl_rtl;
8297 enum machine_mode mode;
8298 enum tree_code code = TREE_CODE (exp);
8300 rtx subtarget, original_target;
8303 bool reduce_bit_field;
8304 location_t loc = EXPR_LOCATION (exp);
8305 struct separate_ops ops;
8306 tree treeop0, treeop1, treeop2;
8307 tree ssa_name = NULL_TREE;
8310 type = TREE_TYPE (exp);
8311 mode = TYPE_MODE (type);
8312 unsignedp = TYPE_UNSIGNED (type);
8314 treeop0 = treeop1 = treeop2 = NULL_TREE;
8315 if (!VL_EXP_CLASS_P (exp))
8316 switch (TREE_CODE_LENGTH (code))
8319 case 3: treeop2 = TREE_OPERAND (exp, 2);
8320 case 2: treeop1 = TREE_OPERAND (exp, 1);
8321 case 1: treeop0 = TREE_OPERAND (exp, 0);
8331 ignore = (target == const0_rtx
8332 || ((CONVERT_EXPR_CODE_P (code)
8333 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8334 && TREE_CODE (type) == VOID_TYPE));
8336 /* An operation in what may be a bit-field type needs the
8337 result to be reduced to the precision of the bit-field type,
8338 which is narrower than that of the type's mode. */
8339 reduce_bit_field = (!ignore
8340 && TREE_CODE (type) == INTEGER_TYPE
8341 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8343 /* If we are going to ignore this result, we need only do something
8344 if there is a side-effect somewhere in the expression. If there
8345 is, short-circuit the most common cases here. Note that we must
8346 not call expand_expr with anything but const0_rtx in case this
8347 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8351 if (! TREE_SIDE_EFFECTS (exp))
8354 /* Ensure we reference a volatile object even if the value is ignored, but
8355 don't do this if all we are doing is taking its address. */
8356 if (TREE_THIS_VOLATILE (exp)
8357 && TREE_CODE (exp) != FUNCTION_DECL
8358 && mode != VOIDmode && mode != BLKmode
8359 && modifier != EXPAND_CONST_ADDRESS)
8361 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8363 temp = copy_to_reg (temp);
8367 if (TREE_CODE_CLASS (code) == tcc_unary
8368 || code == COMPONENT_REF || code == INDIRECT_REF)
8369 return expand_expr (treeop0, const0_rtx, VOIDmode,
8372 else if (TREE_CODE_CLASS (code) == tcc_binary
8373 || TREE_CODE_CLASS (code) == tcc_comparison
8374 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8376 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8377 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8380 else if (code == BIT_FIELD_REF)
8382 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8383 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8384 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8391 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8394 /* Use subtarget as the target for operand 0 of a binary operation. */
8395 subtarget = get_subtarget (target);
8396 original_target = target;
8402 tree function = decl_function_context (exp);
8404 temp = label_rtx (exp);
8405 temp = gen_rtx_LABEL_REF (Pmode, temp);
8407 if (function != current_function_decl
8409 LABEL_REF_NONLOCAL_P (temp) = 1;
8411 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8416 /* ??? ivopts calls the expander without any preparation from
8417 out-of-ssa.  So fake instructions as if this were an access to the
8418 base variable.  This unnecessarily allocates a pseudo; see how we can
8419 reuse it, if partition base vars have it set already. */
8420 if (!currently_expanding_to_rtl)
8421 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8424 g = get_gimple_for_ssa_name (exp);
8425 /* For EXPAND_INITIALIZER try harder to get something simpler. */
8427 && modifier == EXPAND_INITIALIZER
8428 && !SSA_NAME_IS_DEFAULT_DEF (exp)
8429 && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
8430 && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
8431 g = SSA_NAME_DEF_STMT (exp);
8433 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8437 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8438 exp = SSA_NAME_VAR (ssa_name);
8439 goto expand_decl_rtl;
8443 /* If a static var's type was incomplete when the decl was written,
8444 but the type is complete now, lay out the decl now. */
8445 if (DECL_SIZE (exp) == 0
8446 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8447 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8448 layout_decl (exp, 0);
8450 /* ... fall through ... */
8454 decl_rtl = DECL_RTL (exp);
8456 gcc_assert (decl_rtl);
8457 decl_rtl = copy_rtx (decl_rtl);
8458 /* Record writes to register variables. */
8459 if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
8460 && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
8462 int i = REGNO (decl_rtl);
8463 int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
8466 SET_HARD_REG_BIT (crtl->asm_clobbers, i);
8472 /* Ensure the variable is marked as used even if it doesn't go through
8473 a parser.  If it hasn't been used yet, write out an external
8475 if (! TREE_USED (exp))
8477 assemble_external (exp);
8478 TREE_USED (exp) = 1;
8481 /* Show we haven't gotten RTL for this yet. */
8484 /* Variables inherited from containing functions should have
8485 been lowered by this point. */
8486 context = decl_function_context (exp);
8487 gcc_assert (!context
8488 || context == current_function_decl
8489 || TREE_STATIC (exp)
8490 || DECL_EXTERNAL (exp)
8491 /* ??? C++ creates functions that are not TREE_STATIC. */
8492 || TREE_CODE (exp) == FUNCTION_DECL);
8494 /* This is the case of an array whose size is to be determined
8495 from its initializer, while the initializer is still being parsed.
8498 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8499 temp = validize_mem (decl_rtl);
8501 /* If DECL_RTL is memory, we are in the normal case and the
8502 address is not valid, get the address into a register. */
8504 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8507 *alt_rtl = decl_rtl;
8508 decl_rtl = use_anchored_address (decl_rtl);
8509 if (modifier != EXPAND_CONST_ADDRESS
8510 && modifier != EXPAND_SUM
8511 && !memory_address_addr_space_p (DECL_MODE (exp),
8513 MEM_ADDR_SPACE (decl_rtl)))
8514 temp = replace_equiv_address (decl_rtl,
8515 copy_rtx (XEXP (decl_rtl, 0)));
8518 /* If we got something, return it. But first, set the alignment
8519 if the address is a register. */
8522 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8523 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8528 /* If the mode of DECL_RTL does not match that of the decl, it
8529 must be a promoted value. We return a SUBREG of the wanted mode,
8530 but mark it so that we know that it was already extended. */
8531 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8533 enum machine_mode pmode;
8535 /* Get the signedness to be used for this variable. Ensure we get
8536 the same mode we got when the variable was declared. */
8537 if (code == SSA_NAME
8538 && (g = SSA_NAME_DEF_STMT (ssa_name))
8539 && gimple_code (g) == GIMPLE_CALL)
8540 pmode = promote_function_mode (type, mode, &unsignedp,
8542 (TREE_TYPE (gimple_call_fn (g))),
8545 pmode = promote_decl_mode (exp, &unsignedp);
8546 gcc_assert (GET_MODE (decl_rtl) == pmode);
8548 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8549 SUBREG_PROMOTED_VAR_P (temp) = 1;
8550 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8557 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8558 TREE_INT_CST_HIGH (exp), mode);
8564 tree tmp = NULL_TREE;
8565 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8566 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8567 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8568 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8569 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8570 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8571 return const_vector_from_tree (exp);
8572 if (GET_MODE_CLASS (mode) == MODE_INT)
8574 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8576 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8579 tmp = build_constructor_from_list (type,
8580 TREE_VECTOR_CST_ELTS (exp));
8581 return expand_expr (tmp, ignore ? const0_rtx : target,
8586 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8589 /* If optimized, generate immediate CONST_DOUBLE
8590 which will be turned into memory by reload if necessary.
8592 We used to force a register so that loop.c could see it. But
8593 this does not allow gen_* patterns to perform optimizations with
8594 the constants. It also produces two insns in cases like "x = 1.0;".
8595 On most machines, floating-point constants are not permitted in
8596 many insns, so we'd end up copying it to a register in any case.
8598 Now, we do the copying in expand_binop, if appropriate. */
8599 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8600 TYPE_MODE (TREE_TYPE (exp)));
8603 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8604 TYPE_MODE (TREE_TYPE (exp)));
8607 /* Handle evaluating a complex constant in a CONCAT target. */
8608 if (original_target && GET_CODE (original_target) == CONCAT)
8610 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8613 rtarg = XEXP (original_target, 0);
8614 itarg = XEXP (original_target, 1);
8616 /* Move the real and imaginary parts separately. */
8617 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8618 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8621 emit_move_insn (rtarg, op0);
8623 emit_move_insn (itarg, op1);
8625 return original_target;
8628 /* ... fall through ... */
8631 temp = expand_expr_constant (exp, 1, modifier);
8633 /* temp contains a constant address.
8634 On RISC machines where a constant address isn't valid,
8635 make some insns to get that address into a register. */
8636 if (modifier != EXPAND_CONST_ADDRESS
8637 && modifier != EXPAND_INITIALIZER
8638 && modifier != EXPAND_SUM
8639 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8640 MEM_ADDR_SPACE (temp)))
8641 return replace_equiv_address (temp,
8642 copy_rtx (XEXP (temp, 0)));
8648 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8650 if (!SAVE_EXPR_RESOLVED_P (exp))
8652 /* We can indeed still hit this case, typically via builtin
8653 expanders calling save_expr immediately before expanding
8654 something. Assume this means that we only have to deal
8655 with non-BLKmode values. */
8656 gcc_assert (GET_MODE (ret) != BLKmode);
8658 val = build_decl (EXPR_LOCATION (exp),
8659 VAR_DECL, NULL, TREE_TYPE (exp));
8660 DECL_ARTIFICIAL (val) = 1;
8661 DECL_IGNORED_P (val) = 1;
8663 TREE_OPERAND (exp, 0) = treeop0;
8664 SAVE_EXPR_RESOLVED_P (exp) = 1;
8666 if (!CONSTANT_P (ret))
8667 ret = copy_to_reg (ret);
8668 SET_DECL_RTL (val, ret);
8676 /* If we don't need the result, just ensure we evaluate any
8680 unsigned HOST_WIDE_INT idx;
8683 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8684 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8689 return expand_constructor (exp, target, modifier, false);
8691 case TARGET_MEM_REF:
8693 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8694 struct mem_address addr;
8697 get_address_description (exp, &addr);
8698 op0 = addr_for_mem_ref (&addr, as, true);
8699 op0 = memory_address_addr_space (mode, op0, as);
8700 temp = gen_rtx_MEM (mode, op0);
8701 set_mem_attributes (temp, exp, 0);
8702 set_mem_addr_space (temp, as);
8703 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8704 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8706 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8707 /* If the target does not have special handling for unaligned
8708 loads of this mode, then it can use regular moves for them. */
8709 && ((icode = optab_handler (movmisalign_optab, mode))
8710 != CODE_FOR_nothing))
8714 /* We've already validated the memory, and we're creating a
8715 new pseudo destination. The predicates really can't fail. */
8716 reg = gen_reg_rtx (mode);
8718 /* Nor can the insn generator. */
8719 insn = GEN_FCN (icode) (reg, temp);
8720 gcc_assert (insn != NULL_RTX);
8731 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8732 enum machine_mode address_mode;
8733 tree base = TREE_OPERAND (exp, 0);
8736 /* Handle expansion of non-aliased memory with non-BLKmode. That
8737 might end up in a register. */
8738 if (TREE_CODE (base) == ADDR_EXPR)
8740 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8742 base = TREE_OPERAND (base, 0);
8746 base = get_addr_base_and_unit_offset (base, &off);
8750 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8751 decl we must use bitfield operations. */
8753 && !TREE_ADDRESSABLE (base)
8754 && DECL_MODE (base) != BLKmode
8755 && DECL_RTL_SET_P (base)
8756 && !MEM_P (DECL_RTL (base)))
8760 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8761 && (GET_MODE_BITSIZE (DECL_MODE (base))
8762 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8763 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8764 TREE_TYPE (exp), base),
8765 target, tmode, modifier);
8766 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8767 bftype = TREE_TYPE (base);
8768 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8769 bftype = TREE_TYPE (exp);
8770 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8772 TYPE_SIZE (TREE_TYPE (exp)),
8774 target, tmode, modifier);
8777 address_mode = targetm.addr_space.address_mode (as);
8778 base = TREE_OPERAND (exp, 0);
8779 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
8781 tree mask = gimple_assign_rhs2 (def_stmt);
8782 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
8783 gimple_assign_rhs1 (def_stmt), mask);
8784 TREE_OPERAND (exp, 0) = base;
8786 align = MAX (TYPE_ALIGN (TREE_TYPE (exp)),
8787 get_object_alignment (exp, BIGGEST_ALIGNMENT));
8788 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
8789 op0 = memory_address_addr_space (address_mode, op0, as);
8790 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8793 = immed_double_int_const (mem_ref_offset (exp), address_mode);
8794 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
8796 op0 = memory_address_addr_space (mode, op0, as);
8797 temp = gen_rtx_MEM (mode, op0);
8798 set_mem_attributes (temp, exp, 0);
8799 set_mem_addr_space (temp, as);
8800 if (TREE_THIS_VOLATILE (exp))
8801 MEM_VOLATILE_P (temp) = 1;
8803 && (unsigned) align < GET_MODE_ALIGNMENT (mode)
8804 /* If the target does not have special handling for unaligned
8805 loads of this mode, then it can use regular moves for them. */
8806 && ((icode = optab_handler (movmisalign_optab, mode))
8807 != CODE_FOR_nothing))
8811 /* We've already validated the memory, and we're creating a
8812 new pseudo destination. The predicates really can't fail. */
8813 reg = gen_reg_rtx (mode);
8815 /* Nor can the insn generator. */
8816 insn = GEN_FCN (icode) (reg, temp);
8827 tree array = treeop0;
8828 tree index = treeop1;
8830 /* Fold an expression like: "foo"[2].
8831 This is not done in fold so it won't happen inside &.
8832 Don't fold if this is for wide characters since it's too
8833 difficult to do correctly and this is a very rare case. */
8835 if (modifier != EXPAND_CONST_ADDRESS
8836 && modifier != EXPAND_INITIALIZER
8837 && modifier != EXPAND_MEMORY)
8839 tree t = fold_read_from_constant_string (exp);
8842 return expand_expr (t, target, tmode, modifier);
8845 /* If this is a constant index into a constant array,
8846 just get the value from the array. Handle both the cases when
8847 we have an explicit constructor and when our operand is a variable
8848 that was declared const. */
8850 if (modifier != EXPAND_CONST_ADDRESS
8851 && modifier != EXPAND_INITIALIZER
8852 && modifier != EXPAND_MEMORY
8853 && TREE_CODE (array) == CONSTRUCTOR
8854 && ! TREE_SIDE_EFFECTS (array)
8855 && TREE_CODE (index) == INTEGER_CST)
8857 unsigned HOST_WIDE_INT ix;
8860 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8862 if (tree_int_cst_equal (field, index))
8864 if (!TREE_SIDE_EFFECTS (value))
8865 return expand_expr (fold (value), target, tmode, modifier);
8870 else if (optimize >= 1
8871 && modifier != EXPAND_CONST_ADDRESS
8872 && modifier != EXPAND_INITIALIZER
8873 && modifier != EXPAND_MEMORY
8874 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8875 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8876 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8877 && const_value_known_p (array))
8879 if (TREE_CODE (index) == INTEGER_CST)
8881 tree init = DECL_INITIAL (array);
8883 if (TREE_CODE (init) == CONSTRUCTOR)
8885 unsigned HOST_WIDE_INT ix;
8888 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8890 if (tree_int_cst_equal (field, index))
8892 if (TREE_SIDE_EFFECTS (value))
8895 if (TREE_CODE (value) == CONSTRUCTOR)
8897 /* If VALUE is a CONSTRUCTOR, this
8898 optimization is only useful if
8899 this doesn't store the CONSTRUCTOR
8900 into memory. If it does, it is more
8901 efficient to just load the data from
8902 the array directly. */
8903 rtx ret = expand_constructor (value, target,
8905 if (ret == NULL_RTX)
8909 return expand_expr (fold (value), target, tmode,
8913 else if (TREE_CODE (init) == STRING_CST)
8915 tree index1 = index;
8916 tree low_bound = array_ref_low_bound (exp);
8917 index1 = fold_convert_loc (loc, sizetype,
8920 /* Optimize the special case of a zero lower bound.
8922 We convert the low_bound to sizetype to avoid some problems
8923 with constant folding. (E.g. suppose the lower bound is 1,
8924 and its mode is QI. Without the conversion, (ARRAY
8925 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8926 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8928 if (! integer_zerop (low_bound))
8929 index1 = size_diffop_loc (loc, index1,
8930 fold_convert_loc (loc, sizetype,
8933 if (0 > compare_tree_int (index1,
8934 TREE_STRING_LENGTH (init)))
8936 tree type = TREE_TYPE (TREE_TYPE (init));
8937 enum machine_mode mode = TYPE_MODE (type);
8939 if (GET_MODE_CLASS (mode) == MODE_INT
8940 && GET_MODE_SIZE (mode) == 1)
8941 return gen_int_mode (TREE_STRING_POINTER (init)
8942 [TREE_INT_CST_LOW (index1)],
8949 goto normal_inner_ref;
8952 /* If the operand is a CONSTRUCTOR, we can just extract the
8953 appropriate field if it is present. */
8954 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8956 unsigned HOST_WIDE_INT idx;
8959 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8961 if (field == treeop1
8962 /* We can normally use the value of the field in the
8963 CONSTRUCTOR. However, if this is a bitfield in
8964 an integral mode that we can fit in a HOST_WIDE_INT,
8965 we must mask only the number of bits in the bitfield,
8966 since this is done implicitly by the constructor. If
8967 the bitfield does not meet either of those conditions,
8968 we can't do this optimization. */
8969 && (! DECL_BIT_FIELD (field)
8970 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8971 && (GET_MODE_BITSIZE (DECL_MODE (field))
8972 <= HOST_BITS_PER_WIDE_INT))))
8974 if (DECL_BIT_FIELD (field)
8975 && modifier == EXPAND_STACK_PARM)
8977 op0 = expand_expr (value, target, tmode, modifier);
8978 if (DECL_BIT_FIELD (field))
8980 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8981 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8983 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8985 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8986 op0 = expand_and (imode, op0, op1, target);
8991 = build_int_cst (NULL_TREE,
8992 GET_MODE_BITSIZE (imode) - bitsize);
8994 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8996 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9004 goto normal_inner_ref;
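/* A sketch of the bitfield extraction above, assuming a 3-bit field
   held in 32-bit SImode (illustrative values):

       unsigned field:  op0 &= (1 << 3) - 1;         mask to 3 bits
       signed field:    op0 = (op0 << 29) >> 29;     shift pair sign-extends  */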
9007 case ARRAY_RANGE_REF:
9010 enum machine_mode mode1, mode2;
9011 HOST_WIDE_INT bitsize, bitpos;
9013 int volatilep = 0, must_force_mem;
9014 bool packedp = false;
9015 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9016 &mode1, &unsignedp, &volatilep, true);
9017 rtx orig_op0, memloc;
9019 /* If we got back the original object, something is wrong. Perhaps
9020 we are evaluating an expression too early. In any event, don't
9021 infinitely recurse. */
9022 gcc_assert (tem != exp);
9024 if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9025 || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9026 && DECL_PACKED (TREE_OPERAND (exp, 1))))
9029 /* If TEM's type is a union of variable size, pass TARGET to the inner
9030 computation, since it will need a temporary and TARGET is known
9031 to suffice. This occurs in unchecked conversion in Ada. */
9034 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9035 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9037 && modifier != EXPAND_STACK_PARM
9038 ? target : NULL_RTX),
9040 (modifier == EXPAND_INITIALIZER
9041 || modifier == EXPAND_CONST_ADDRESS
9042 || modifier == EXPAND_STACK_PARM)
9043 ? modifier : EXPAND_NORMAL);
9046 /* If the bitfield is volatile, we want to access it in the
9047 field's mode, not the computed mode.
9048 If a MEM has VOIDmode (external with incomplete type),
9049 use BLKmode for it instead. */
9052 if (volatilep && flag_strict_volatile_bitfields > 0)
9053 op0 = adjust_address (op0, mode1, 0);
9054 else if (GET_MODE (op0) == VOIDmode)
9055 op0 = adjust_address (op0, BLKmode, 0);
9059 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9061 /* If we have either an offset, a BLKmode result, or a reference
9062 outside the underlying object, we must force it to memory.
9063 Such a case can occur in Ada if we have unchecked conversion
9064 of an expression from a scalar type to an aggregate type or
9065 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9066 passed a partially uninitialized object or a view-conversion
9067 to a larger size. */
9068 must_force_mem = (offset
9070 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9072 /* Handle CONCAT first. */
9073 if (GET_CODE (op0) == CONCAT && !must_force_mem)
9076 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9079 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9082 op0 = XEXP (op0, 0);
9083 mode2 = GET_MODE (op0);
9085 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9086 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9090 op0 = XEXP (op0, 1);
9092 mode2 = GET_MODE (op0);
9095 /* Otherwise force into memory. */
9099 /* If this is a constant, put it in a register if it is a legitimate
9100 constant and we don't need a memory reference. */
9101 if (CONSTANT_P (op0)
9103 && LEGITIMATE_CONSTANT_P (op0)
9105 op0 = force_reg (mode2, op0);
9107 /* Otherwise, if this is a constant, try to force it to the constant
9108 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9109 is a legitimate constant. */
9110 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9111 op0 = validize_mem (memloc);
9113 /* Otherwise, if this is a constant or the object is not in memory
9114 and needs to be, put it there. */
9115 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9117 tree nt = build_qualified_type (TREE_TYPE (tem),
9118 (TYPE_QUALS (TREE_TYPE (tem))
9119 | TYPE_QUAL_CONST));
9120 memloc = assign_temp (nt, 1, 1, 1);
9121 emit_move_insn (memloc, op0);
9127 enum machine_mode address_mode;
9128 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9131 gcc_assert (MEM_P (op0));
9134 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9135 if (GET_MODE (offset_rtx) != address_mode)
9136 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9138 if (GET_MODE (op0) == BLKmode
9139 /* A constant address in OP0 can have VOIDmode, we must
9140 not try to call force_reg in that case. */
9141 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9143 && (bitpos % bitsize) == 0
9144 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9145 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9147 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9151 op0 = offset_address (op0, offset_rtx,
9152 highest_pow2_factor (offset));
9155 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9156 record its alignment as BIGGEST_ALIGNMENT. */
9157 if (MEM_P (op0) && bitpos == 0 && offset != 0
9158 && is_aligning_offset (offset, tem))
9159 set_mem_align (op0, BIGGEST_ALIGNMENT);
9161 /* Don't forget about volatility even if this is a bitfield. */
9162 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9164 if (op0 == orig_op0)
9165 op0 = copy_rtx (op0);
9167 MEM_VOLATILE_P (op0) = 1;
9170 /* In cases where an aligned union has an unaligned object
9171 as a field, we might be extracting a BLKmode value from
9172 an integer-mode (e.g., SImode) object. Handle this case
9173 by doing the extract into an object as wide as the field
9174 (which we know to be the width of a basic mode), then
9175 storing into memory, and changing the mode to BLKmode. */
9176 if (mode1 == VOIDmode
9177 || REG_P (op0) || GET_CODE (op0) == SUBREG
9178 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9179 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9180 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9181 && modifier != EXPAND_CONST_ADDRESS
9182 && modifier != EXPAND_INITIALIZER)
9183 /* If the field is volatile, we always want an aligned
9185 || (volatilep && flag_strict_volatile_bitfields > 0)
9186 /* If the field isn't aligned enough to fetch as a memref,
9187 fetch it as a bit field. */
9188 || (mode1 != BLKmode
9189 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9190 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9192 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9193 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9194 && ((modifier == EXPAND_CONST_ADDRESS
9195 || modifier == EXPAND_INITIALIZER)
9197 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9198 || (bitpos % BITS_PER_UNIT != 0)))
9199 /* If the type and the field are a constant size and the
9200 size of the type isn't the same size as the bitfield,
9201 we must use bitfield operations. */
9203 && TYPE_SIZE (TREE_TYPE (exp))
9204 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9205 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9208 enum machine_mode ext_mode = mode;
9210 if (ext_mode == BLKmode
9211 && ! (target != 0 && MEM_P (op0)
9213 && bitpos % BITS_PER_UNIT == 0))
9214 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9216 if (ext_mode == BLKmode)
9219 target = assign_temp (type, 0, 1, 1);
9224 /* In this case, BITPOS must start at a byte boundary and
9225 TARGET, if specified, must be a MEM. */
9226 gcc_assert (MEM_P (op0)
9227 && (!target || MEM_P (target))
9228 && !(bitpos % BITS_PER_UNIT));
9230 emit_block_move (target,
9231 adjust_address (op0, VOIDmode,
9232 bitpos / BITS_PER_UNIT),
9233 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9235 (modifier == EXPAND_STACK_PARM
9236 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9241 op0 = validize_mem (op0);
9243 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9244 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9246 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
9247 (modifier == EXPAND_STACK_PARM
9248 ? NULL_RTX : target),
9249 ext_mode, ext_mode);
9251 /* If the result is a record type and BITSIZE is narrower than
9252 the mode of OP0, an integral mode, and this is a big endian
9253 machine, we must put the field into the high-order bits. */
9254 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9255 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9256 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9257 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9258 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9262 /* If the result type is BLKmode, store the data into a temporary
9263 of the appropriate type, but with the mode corresponding to the
9264 mode for the data we have (op0's mode). It's tempting to make
9265 this a constant type, since we know it's only being stored once,
9266 but that can cause problems if we are taking the address of this
9267 COMPONENT_REF because the MEM of any reference via that address
9268 will have flags corresponding to the type, which will not
9269 necessarily be constant. */
9270 if (mode == BLKmode)
9272 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9275 /* If the reference doesn't use the alias set of its type,
9276 we cannot create the temporary using that type. */
9277 if (component_uses_parent_alias_set (exp))
9279 new_rtx = assign_stack_local (ext_mode, size, 0);
9280 set_mem_alias_set (new_rtx, get_alias_set (exp));
9283 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9285 emit_move_insn (new_rtx, op0);
9286 op0 = copy_rtx (new_rtx);
9287 PUT_MODE (op0, BLKmode);
9288 set_mem_attributes (op0, exp, 1);
9294 /* If the result is BLKmode, use that to access the object
9296 if (mode == BLKmode)
9299 /* Get a reference to just this component. */
9300 if (modifier == EXPAND_CONST_ADDRESS
9301 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9302 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9304 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9306 if (op0 == orig_op0)
9307 op0 = copy_rtx (op0);
9309 set_mem_attributes (op0, exp, 0);
9310 if (REG_P (XEXP (op0, 0)))
9311 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9313 MEM_VOLATILE_P (op0) |= volatilep;
9314 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9315 || modifier == EXPAND_CONST_ADDRESS
9316 || modifier == EXPAND_INITIALIZER)
9318 else if (target == 0)
9319 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9321 convert_move (target, op0, unsignedp);
9326 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9329 /* All valid uses of __builtin_va_arg_pack () are removed during
9331 if (CALL_EXPR_VA_ARG_PACK (exp))
9332 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9334 tree fndecl = get_callee_fndecl (exp), attr;
9337 && (attr = lookup_attribute ("error",
9338 DECL_ATTRIBUTES (fndecl))) != NULL)
9339 error ("%Kcall to %qs declared with attribute error: %s",
9340 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9341 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9343 && (attr = lookup_attribute ("warning",
9344 DECL_ATTRIBUTES (fndecl))) != NULL)
9345 warning_at (tree_nonartificial_location (exp),
9346 0, "%Kcall to %qs declared with attribute warning: %s",
9347 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9348 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9350 /* Check for a built-in function. */
9351 if (fndecl && DECL_BUILT_IN (fndecl))
9353 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9354 return expand_builtin (exp, target, subtarget, tmode, ignore);
9357 return expand_call (exp, target, ignore);
9359 case VIEW_CONVERT_EXPR:
9362 /* If we are converting to BLKmode, try to avoid an intermediate
9363 temporary by fetching an inner memory reference. */
9365 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9366 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9367 && handled_component_p (treeop0))
9369 enum machine_mode mode1;
9370 HOST_WIDE_INT bitsize, bitpos;
9375 = get_inner_reference (treeop0, &bitsize, &bitpos,
9376 &offset, &mode1, &unsignedp, &volatilep,
9380 /* ??? We should work harder and deal with non-zero offsets. */
9382 && (bitpos % BITS_PER_UNIT) == 0
9384 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9386 /* See the normal_inner_ref case for the rationale. */
9389 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9390 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9392 && modifier != EXPAND_STACK_PARM
9393 ? target : NULL_RTX),
9395 (modifier == EXPAND_INITIALIZER
9396 || modifier == EXPAND_CONST_ADDRESS
9397 || modifier == EXPAND_STACK_PARM)
9398 ? modifier : EXPAND_NORMAL);
9400 if (MEM_P (orig_op0))
9404 /* Get a reference to just this component. */
9405 if (modifier == EXPAND_CONST_ADDRESS
9406 || modifier == EXPAND_SUM
9407 || modifier == EXPAND_INITIALIZER)
9408 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9410 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9412 if (op0 == orig_op0)
9413 op0 = copy_rtx (op0);
9415 set_mem_attributes (op0, treeop0, 0);
9416 if (REG_P (XEXP (op0, 0)))
9417 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9419 MEM_VOLATILE_P (op0) |= volatilep;
9425 op0 = expand_expr (treeop0,
9426 NULL_RTX, VOIDmode, modifier);
9428 /* If the input and output modes are both the same, we are done. */
9429 if (mode == GET_MODE (op0))
9431 /* If neither mode is BLKmode, and both modes are the same size
9432 then we can use gen_lowpart. */
9433 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9434 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9435 && !COMPLEX_MODE_P (GET_MODE (op0)))
9437 if (GET_CODE (op0) == SUBREG)
9438 op0 = force_reg (GET_MODE (op0), op0);
9439 temp = gen_lowpart_common (mode, op0);
9444 if (!REG_P (op0) && !MEM_P (op0))
9445 op0 = force_reg (GET_MODE (op0), op0);
9446 op0 = gen_lowpart (mode, op0);
9449 /* If both types are integral, convert from one mode to the other. */
9450 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9451 op0 = convert_modes (mode, GET_MODE (op0), op0,
9452 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9453 /* As a last resort, spill op0 to memory, and reload it in a
9455 else if (!MEM_P (op0))
9457 /* If the operand is not a MEM, force it into memory. Since we
9458 are going to be changing the mode of the MEM, don't call
9459 force_const_mem for constants because we don't allow pool
9460 constants to change mode. */
9461 tree inner_type = TREE_TYPE (treeop0);
9463 gcc_assert (!TREE_ADDRESSABLE (exp));
9465 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9467 = assign_stack_temp_for_type
9468 (TYPE_MODE (inner_type),
9469 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9471 emit_move_insn (target, op0);
9475 /* At this point, OP0 is in the correct mode. If the output type is
9476 such that the operand is known to be aligned, indicate that it is.
9477 Otherwise, we need only be concerned about alignment for non-BLKmode
9481 op0 = copy_rtx (op0);
9483 if (TYPE_ALIGN_OK (type))
9484 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9485 else if (STRICT_ALIGNMENT
9487 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9489 tree inner_type = TREE_TYPE (treeop0);
9490 HOST_WIDE_INT temp_size
9491 = MAX (int_size_in_bytes (inner_type),
9492 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9494 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9495 rtx new_with_op0_mode
9496 = adjust_address (new_rtx, GET_MODE (op0), 0);
9498 gcc_assert (!TREE_ADDRESSABLE (exp));
9500 if (GET_MODE (op0) == BLKmode)
9501 emit_block_move (new_with_op0_mode, op0,
9502 GEN_INT (GET_MODE_SIZE (mode)),
9503 (modifier == EXPAND_STACK_PARM
9504 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9506 emit_move_insn (new_with_op0_mode, op0);
9511 op0 = adjust_address (op0, mode, 0);
9516 /* Use a compare and a jump for BLKmode comparisons, or for function
9517 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9519 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9520 are occasionally created by folding during expansion. */
9521 case TRUTH_ANDIF_EXPR:
9522 case TRUTH_ORIF_EXPR:
9525 || modifier == EXPAND_STACK_PARM
9526 || ! safe_from_p (target, treeop0, 1)
9527 || ! safe_from_p (target, treeop1, 1)
9528 /* Make sure we don't have a hard reg (such as function's return
9529 value) live across basic blocks, if not optimizing. */
9530 || (!optimize && REG_P (target)
9531 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9532 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9535 emit_move_insn (target, const0_rtx);
9537 op1 = gen_label_rtx ();
9538 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9541 emit_move_insn (target, const1_rtx);
9544 return ignore ? const0_rtx : target;
9546 case STATEMENT_LIST:
9548 tree_stmt_iterator iter;
9550 gcc_assert (ignore);
9552 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9553 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9558 /* A COND_EXPR with its type being VOID_TYPE represents a
9559 conditional jump and is handled in
9560 expand_gimple_cond_expr. */
9561 gcc_assert (!VOID_TYPE_P (type));
9563 /* Note that COND_EXPRs whose type is a structure or union
9564 are required to be constructed to contain assignments of
9565 a temporary variable, so that we can evaluate them here
9566 for side effect only. If type is void, we must do likewise. */
9568 gcc_assert (!TREE_ADDRESSABLE (type)
9570 && TREE_TYPE (treeop1) != void_type_node
9571 && TREE_TYPE (treeop2) != void_type_node);
9573 /* If we are not to produce a result, we have no target. Otherwise,
9574 if a target was specified, use it; it will not be used as an
9575 intermediate target unless it is safe. If no target, use a
9578 if (modifier != EXPAND_STACK_PARM
9580 && safe_from_p (original_target, treeop0, 1)
9581 && GET_MODE (original_target) == mode
9582 #ifdef HAVE_conditional_move
9583 && (! can_conditionally_move_p (mode)
9584 || REG_P (original_target))
9586 && !MEM_P (original_target))
9587 temp = original_target;
9589 temp = assign_temp (type, 0, 0, 1);
9591 do_pending_stack_adjust ();
9593 op0 = gen_label_rtx ();
9594 op1 = gen_label_rtx ();
9595 jumpifnot (treeop0, op0, -1);
9596 store_expr (treeop1, temp,
9597 modifier == EXPAND_STACK_PARM,
9600 emit_jump_insn (gen_jump (op1));
9603 store_expr (treeop2, temp,
9604 modifier == EXPAND_STACK_PARM,
9612 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9619 gcc_assert (ignore);
9621 /* Check for |= or &= of a bitfield of size one into another bitfield
9622 of size 1. In this case, (unless we need the result of the
9623 assignment) we can do this more efficiently with a
9624 test followed by an assignment, if necessary.
9626 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9627 things change so we do, this code should be enhanced to
9629 if (TREE_CODE (lhs) == COMPONENT_REF
9630 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9631 || TREE_CODE (rhs) == BIT_AND_EXPR)
9632 && TREE_OPERAND (rhs, 0) == lhs
9633 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9634 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9635 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9637 rtx label = gen_label_rtx ();
9638 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9639 do_jump (TREE_OPERAND (rhs, 1),
9641 value ? 0 : label, -1);
9642 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9643 MOVE_NONTEMPORAL (exp));
9644 do_pending_stack_adjust ();
9649 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
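/* A sketch of the one-bit shortcut above, assuming one-bit fields
   s.b and t.c (hypothetical names):

       s.b |= t.c;   ==>   if (t.c) s.b = 1;
       s.b &= t.c;   ==>   if (!t.c) s.b = 0;

   The store of the constant is skipped whenever the tested bit leaves
   the destination unchanged.  */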
9654 return expand_expr_addr_expr (exp, target, tmode, modifier);
9657 op0 = expand_normal (treeop0);
9658 return read_complex_part (op0, false);
9661 op0 = expand_normal (treeop0);
9662 return read_complex_part (op0, true);
9669 /* Expanded in cfgexpand.c. */
9672 case TRY_CATCH_EXPR:
9674 case EH_FILTER_EXPR:
9675 case TRY_FINALLY_EXPR:
9676 /* Lowered by tree-eh.c. */
9679 case WITH_CLEANUP_EXPR:
9680 case CLEANUP_POINT_EXPR:
9682 case CASE_LABEL_EXPR:
9688 case PREINCREMENT_EXPR:
9689 case PREDECREMENT_EXPR:
9690 case POSTINCREMENT_EXPR:
9691 case POSTDECREMENT_EXPR:
9694 /* Lowered by gimplify.c. */
9698 /* Function descriptors are not valid except as
9699 initialization constants, and should not be expanded. */
9702 case WITH_SIZE_EXPR:
9703 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9704 have pulled out the size to use in whatever context it needed. */
9705 return expand_expr_real (treeop0, original_target, tmode,
9708 case REALIGN_LOAD_EXPR:
9710 tree oprnd0 = treeop0;
9711 tree oprnd1 = treeop1;
9712 tree oprnd2 = treeop2;
9715 this_optab = optab_for_tree_code (code, type, optab_default);
9716 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9717 op2 = expand_normal (oprnd2);
9718 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9726 tree oprnd0 = treeop0;
9727 tree oprnd1 = treeop1;
9728 tree oprnd2 = treeop2;
9731 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9732 op2 = expand_normal (oprnd2);
9733 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9738 case COMPOUND_LITERAL_EXPR:
9740 /* Initialize the anonymous variable declared in the compound
9741 literal, then return the variable. */
9742 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9744 /* Create RTL for this variable. */
9745 if (!DECL_RTL_SET_P (decl))
9747 if (DECL_HARD_REGISTER (decl))
9748 /* The user specified an assembler name for this variable.
9750 rest_of_decl_compilation (decl, 0, 0);
9755 return expand_expr_real (decl, original_target, tmode,
9760 return expand_expr_real_2 (&ops, target, tmode, modifier);
9764 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9765 signedness of TYPE), possibly returning the result in TARGET. */
9767 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9769 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9770 if (target && GET_MODE (target) != GET_MODE (exp))
9772 /* For constant values, reduce using build_int_cst_type. */
9773 if (CONST_INT_P (exp))
9775 HOST_WIDE_INT value = INTVAL (exp);
9776 tree t = build_int_cst_type (type, value);
9777 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9779 else if (TYPE_UNSIGNED (type))
9781 rtx mask = immed_double_int_const (double_int_mask (prec),
9783 return expand_and (GET_MODE (exp), exp, mask, target);
9787 tree count = build_int_cst (NULL_TREE,
9788 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9789 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9790 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
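/* A worked example of the reduction above, assuming an 8-bit mode and
   a 5-bit target precision (illustrative values):

       unsigned:  exp & 0x1f                keep the low five bits
       signed:    (exp << 3) >> 3           arithmetic shift pair, so
                  0x1f (-1 in 5 bits)  ==>  0xff (-1 in 8 bits)  */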
9794 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9795 when applied to the address of EXP produces an address known to be
9796 aligned more than BIGGEST_ALIGNMENT. */
9799 is_aligning_offset (const_tree offset, const_tree exp)
9801 /* Strip off any conversions. */
9802 while (CONVERT_EXPR_P (offset))
9803 offset = TREE_OPERAND (offset, 0);
9805 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9806 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9807 if (TREE_CODE (offset) != BIT_AND_EXPR
9808 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9809 || compare_tree_int (TREE_OPERAND (offset, 1),
9810 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9811 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
9814 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9815 It must be NEGATE_EXPR. Then strip any more conversions. */
9816 offset = TREE_OPERAND (offset, 0);
9817 while (CONVERT_EXPR_P (offset))
9818 offset = TREE_OPERAND (offset, 0);
9820 if (TREE_CODE (offset) != NEGATE_EXPR)
9823 offset = TREE_OPERAND (offset, 0);
9824 while (CONVERT_EXPR_P (offset))
9825 offset = TREE_OPERAND (offset, 0);
9827 /* This must now be the address of EXP. */
9828 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
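/* The pattern recognized above corresponds to source code such as
   (a sketch, with ALIGN a power of 2 larger than BIGGEST_ALIGNMENT):

       offset = (-(uintptr_t) &exp) & (ALIGN - 1);

   i.e. a BIT_AND_EXPR whose first operand is a NEGATE_EXPR of the
   address of EXP, possibly wrapped in conversions.  */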
9831 /* Return the tree node if ARG corresponds to a string constant or zero
9832 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9833 in bytes within the string that ARG is accessing. The type of the
9834 offset will be `sizetype'. */
9837 string_constant (tree arg, tree *ptr_offset)
9839 tree array, offset, lower_bound;
9842 if (TREE_CODE (arg) == ADDR_EXPR)
9844 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9846 *ptr_offset = size_zero_node;
9847 return TREE_OPERAND (arg, 0);
9849 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9851 array = TREE_OPERAND (arg, 0);
9852 offset = size_zero_node;
9854 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9856 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9857 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9858 if (TREE_CODE (array) != STRING_CST
9859 && TREE_CODE (array) != VAR_DECL)
9862 /* Check if the array has a nonzero lower bound. */
9863 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9864 if (!integer_zerop (lower_bound))
9866 /* If the offset and base aren't both constants, return 0. */
9867 if (TREE_CODE (lower_bound) != INTEGER_CST)
9869 if (TREE_CODE (offset) != INTEGER_CST)
9871 /* Adjust offset by the lower bound. */
9872 offset = size_diffop (fold_convert (sizetype, offset),
9873 fold_convert (sizetype, lower_bound));
9879 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9881 tree arg0 = TREE_OPERAND (arg, 0);
9882 tree arg1 = TREE_OPERAND (arg, 1);
9887 if (TREE_CODE (arg0) == ADDR_EXPR
9888 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9889 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9891 array = TREE_OPERAND (arg0, 0);
9894 else if (TREE_CODE (arg1) == ADDR_EXPR
9895 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9896 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9898 array = TREE_OPERAND (arg1, 0);
9907 if (TREE_CODE (array) == STRING_CST)
9909 *ptr_offset = fold_convert (sizetype, offset);
9912 else if (TREE_CODE (array) == VAR_DECL
9913 || TREE_CODE (array) == CONST_DECL)
9917 /* Variables initialized to string literals can be handled too. */
9918 if (!const_value_known_p (array)
9919 || !DECL_INITIAL (array)
9920 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9923 /* Avoid const char foo[4] = "abcde"; */
9924 if (DECL_SIZE_UNIT (array) == NULL_TREE
9925 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9926 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9927 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9930 /* If the variable is bigger than the string literal, OFFSET must be
9931 constant and inside the bounds of the string literal. */
9932 offset = fold_convert (sizetype, offset);
9933 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9934 && (! host_integerp (offset, 1)
9935 || compare_tree_int (offset, length) >= 0))
9938 *ptr_offset = offset;
9939 return DECL_INITIAL (array);
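/* Examples of what string_constant recognizes (a sketch; offsets are
   returned in bytes, as sizetype):

       &"hello"[2]   ==>  the STRING_CST itself, *PTR_OFFSET = 2

   and, for const char s[] = "hello";

       s + 2         ==>  DECL_INITIAL (s), *PTR_OFFSET = 2  */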
9945 /* Generate code to calculate OPS, an exploded expression,
9946 using a store-flag instruction and return an rtx for the result.
9947 OPS reflects a comparison.
9949 If TARGET is nonzero, store the result there if convenient.
9951 Return zero if there is no suitable set-flag instruction
9952 available on this machine.
9954 Once expand_expr has been called on the arguments of the comparison,
9955 we are committed to doing the store flag, since it is not safe to
9956 re-evaluate the expression. We emit the store-flag insn by calling
9957 emit_store_flag, but only expand the arguments if we have a reason
9958 to believe that emit_store_flag will be successful. If we think that
9959 it will, but it isn't, we have to simulate the store-flag with a
9960 set/jump/set sequence. */
9963 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9966 tree arg0, arg1, type;
9968 enum machine_mode operand_mode;
9971 rtx subtarget = target;
9972 location_t loc = ops->location;
9977 /* Don't crash if the comparison was erroneous. */
9978 if (arg0 == error_mark_node || arg1 == error_mark_node)
9981 type = TREE_TYPE (arg0);
9982 operand_mode = TYPE_MODE (type);
9983 unsignedp = TYPE_UNSIGNED (type);
9985 /* We won't bother with BLKmode store-flag operations because it would mean
9986 passing a lot of information to emit_store_flag. */
9987 if (operand_mode == BLKmode)
9990 /* We won't bother with store-flag operations involving function pointers
9991 when function pointers must be canonicalized before comparisons. */
9992 #ifdef HAVE_canonicalize_funcptr_for_compare
9993 if (HAVE_canonicalize_funcptr_for_compare
9994 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9995 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9997 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9998 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9999 == FUNCTION_TYPE))))
10006 /* Get the rtx comparison code to use. We know that EXP is a comparison
10007 operation of some type. Some comparisons against 1 and -1 can be
10008 converted to comparisons with zero. Do so here so that the tests
10009 below will be aware that we have a comparison with zero. These
10010 tests will not catch constants in the first operand, but constants
10011 are rarely passed as the first operand. */
10022 if (integer_onep (arg1))
10023 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10025 code = unsignedp ? LTU : LT;
10028 if (! unsignedp && integer_all_onesp (arg1))
10029 arg1 = integer_zero_node, code = LT;
10031 code = unsignedp ? LEU : LE;
10034 if (! unsignedp && integer_all_onesp (arg1))
10035 arg1 = integer_zero_node, code = GE;
10037 code = unsignedp ? GTU : GT;
10040 if (integer_onep (arg1))
10041 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10043 code = unsignedp ? GEU : GE;
10046 case UNORDERED_EXPR:
10072 gcc_unreachable ();
10075 /* Put a constant second. */
10076 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10077 || TREE_CODE (arg0) == FIXED_CST)
10079 tem = arg0; arg0 = arg1; arg1 = tem;
10080 code = swap_condition (code);
10083 /* If this is an equality or inequality test of a single bit, we can
10084 do this by shifting the bit being tested to the low-order bit and
10085 masking the result with the constant 1. If the condition was EQ,
10086 we xor it with 1. This does not require an scc insn and is faster
10087 than an scc insn even if we have it.
10089 The code to make this transformation was moved into fold_single_bit_test,
10090 so we just call into the folder and expand its result. */
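/* For example (a sketch of the folded form):

       (x & 8) != 0   ==>   (x >> 3) & 1
       (x & 8) == 0   ==>   ((x >> 3) & 1) ^ 1  */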
10092 if ((code == NE || code == EQ)
10093 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10094 && integer_pow2p (TREE_OPERAND (arg0, 1))
10095 && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10097 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10098 return expand_expr (fold_single_bit_test (loc,
10099 code == NE ? NE_EXPR : EQ_EXPR,
10101 target, VOIDmode, EXPAND_NORMAL);
10104 if (! get_subtarget (target)
10105 || GET_MODE (subtarget) != operand_mode)
10108 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10111 target = gen_reg_rtx (mode);
10113 /* Try a cstore if possible. */
10114 return emit_store_flag_force (target, code, op0, op1,
10115 operand_mode, unsignedp,
10116 (TYPE_PRECISION (ops->type) == 1
10117 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10121 /* Stubs in case we haven't got a casesi insn. */
10122 #ifndef HAVE_casesi
10123 # define HAVE_casesi 0
10124 # define gen_casesi(a, b, c, d, e) (0)
10125 # define CODE_FOR_casesi CODE_FOR_nothing
10128 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10129 0 otherwise (i.e. if there is no casesi instruction). */
10131 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10132 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10133 rtx fallback_label ATTRIBUTE_UNUSED)
10135 enum machine_mode index_mode = SImode;
10136 int index_bits = GET_MODE_BITSIZE (index_mode);
10137 rtx op1, op2, index;
10138 enum machine_mode op_mode;
10143 /* Convert the index to SImode. */
10144 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10146 enum machine_mode omode = TYPE_MODE (index_type);
10147 rtx rangertx = expand_normal (range);
10149 /* We must handle the endpoints in the original mode. */
10150 index_expr = build2 (MINUS_EXPR, index_type,
10151 index_expr, minval);
10152 minval = integer_zero_node;
10153 index = expand_normal (index_expr);
10155 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10156 omode, 1, default_label);
10157 /* Now we can safely truncate. */
10158 index = convert_to_mode (index_mode, index, 0);
10162 if (TYPE_MODE (index_type) != index_mode)
10164 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10165 index_expr = fold_convert (index_type, index_expr);
10168 index = expand_normal (index_expr);
10171 do_pending_stack_adjust ();
10173 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10174 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10176 index = copy_to_mode_reg (op_mode, index);
10178 op1 = expand_normal (minval);
10180 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10181 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10182 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10183 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10185 op1 = copy_to_mode_reg (op_mode, op1);
10187 op2 = expand_normal (range);
10189 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10190 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10191 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10192 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10194 op2 = copy_to_mode_reg (op_mode, op2);
10196 emit_jump_insn (gen_casesi (index, op1, op2,
10197 table_label, !default_label
10198 ? fallback_label : default_label));
10202 /* Attempt to generate a tablejump instruction; same concept. */
10203 #ifndef HAVE_tablejump
10204 #define HAVE_tablejump 0
10205 #define gen_tablejump(x, y) (0)
10208 /* Subroutine of the next function.
10210 INDEX is the value being switched on, with the lowest value
10211 in the table already subtracted.
10212 MODE is its expected mode (needed if INDEX is constant).
10213 RANGE is the length of the jump table.
10214 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10216 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10217 index value is out of range. */
10220 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10225 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10226 cfun->cfg->max_jumptable_ents = INTVAL (range);
10228 /* Do an unsigned comparison (in the proper mode) between the index
10229 expression and the value which represents the length of the range.
10230 Since we just finished subtracting the lower bound of the range
10231 from the index expression, this comparison allows us to simultaneously
10232 check that the original index expression value is both greater than
10233 or equal to the minimum value of the range and less than or equal to
10234 the maximum value of the range. */
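/* At the source level the same trick reads (a sketch, not compiler
   code):

       if ((unsigned) (i - low) > (unsigned) (high - low))
	 goto default_label;

   one unsigned comparison rejects both i < low, which wraps around to
   a huge value, and i > high.  */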
10237 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10240 /* If index is in range, it must fit in Pmode.
10241 Convert to Pmode so we can index with it. */
10243 index = convert_to_mode (Pmode, index, 1);
10245 /* Don't let a MEM slip through, because then the INDEX that comes
10246 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10247 and break_out_memory_refs will go to work on it and mess it up. */
10248 #ifdef PIC_CASE_VECTOR_ADDRESS
10249 if (flag_pic && !REG_P (index))
10250 index = copy_to_mode_reg (Pmode, index);
10253 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10254 GET_MODE_SIZE, because this indicates how large insns are. The other
10255 uses should all be Pmode, because they are addresses. This code
10256 could fail if addresses and insns are not the same size. */
10257 index = gen_rtx_PLUS (Pmode,
10258 gen_rtx_MULT (Pmode, index,
10259 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10260 gen_rtx_LABEL_REF (Pmode, table_label));
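/* I.e. the entry address is TABLE_LABEL + INDEX * entry-size; with
   4-byte entries, entry 3 lives at TABLE_LABEL + 12 (illustrative
   numbers).  */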
10261 #ifdef PIC_CASE_VECTOR_ADDRESS
10263 index = PIC_CASE_VECTOR_ADDRESS (index);
10266 index = memory_address (CASE_VECTOR_MODE, index);
10267 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10268 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10269 convert_move (temp, vector, 0);
10271 emit_jump_insn (gen_tablejump (temp, table_label));
10273 /* If we are generating PIC code or if the table is PC-relative, the
10274 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10275 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10280 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10281 rtx table_label, rtx default_label)
10285 if (! HAVE_tablejump)
10288 index_expr = fold_build2 (MINUS_EXPR, index_type,
10289 fold_convert (index_type, index_expr),
10290 fold_convert (index_type, minval));
10291 index = expand_normal (index_expr);
10292 do_pending_stack_adjust ();
10294 do_tablejump (index, TYPE_MODE (index_type),
10295 convert_modes (TYPE_MODE (index_type),
10296 TYPE_MODE (TREE_TYPE (range)),
10297 expand_normal (range),
10298 TYPE_UNSIGNED (TREE_TYPE (range))),
10299 table_label, default_label);
10303 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10305 const_vector_from_tree (tree exp)
10310 enum machine_mode inner, mode;
10312 mode = TYPE_MODE (TREE_TYPE (exp));
10314 if (initializer_zerop (exp))
10315 return CONST0_RTX (mode);
10317 units = GET_MODE_NUNITS (mode);
10318 inner = GET_MODE_INNER (mode);
10320 v = rtvec_alloc (units);
10322 link = TREE_VECTOR_CST_ELTS (exp);
10323 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10325 elt = TREE_VALUE (link);
10327 if (TREE_CODE (elt) == REAL_CST)
10328 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10330 else if (TREE_CODE (elt) == FIXED_CST)
10331 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10334 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10338 /* Initialize remaining elements to 0. */
10339 for (; i < units; ++i)
10340 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10342 return gen_rtx_CONST_VECTOR (mode, v);
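/* E.g. a four-lane integer vector constant written as {1, 2, 3} comes
   back as (const_vector [1 2 3 0]), the trailing lane zero-filled by
   the loop above (a sketch; lane count illustrative).  */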
10345 /* Build a decl for a personality function given a language prefix. */
10348 build_personality_function (const char *lang)
10350 const char *unwind_and_version;
10354 switch (targetm.except_unwind_info (&global_options))
10359 unwind_and_version = "_sj0";
10363 unwind_and_version = "_v0";
10366 gcc_unreachable ();
10369 name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
10371 type = build_function_type_list (integer_type_node, integer_type_node,
10372 long_long_unsigned_type_node,
10373 ptr_type_node, ptr_type_node, NULL_TREE);
10374 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10375 get_identifier (name), type);
10376 DECL_ARTIFICIAL (decl) = 1;
10377 DECL_EXTERNAL (decl) = 1;
10378 TREE_PUBLIC (decl) = 1;
10380 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10381 are the flags assigned by targetm.encode_section_info. */
10382 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
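/* For example, build_personality_function ("gxx") yields a decl named
   "__gxx_personality_v0", or "__gxx_personality_sj0" under
   setjmp/longjmp unwinding, per the ACONCAT above.  */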
10387 /* Extracts the personality function of DECL and returns the corresponding
10391 get_personality_function (tree decl)
10393 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10394 enum eh_personality_kind pk;
10396 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10397 if (pk == eh_personality_none)
10401 && pk == eh_personality_any)
10402 personality = lang_hooks.eh_personality ();
10404 if (pk == eh_personality_lang)
10405 gcc_assert (personality != NULL_TREE);
10407 return XEXP (DECL_RTL (personality), 0);
10410 #include "gt-expr.h"