/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "ssaexpand.h"
#include "target-globals.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to, to_addr, from, from_addr;
  int autinc_to, explicit_inc_to;
  int autinc_from, explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces_d
{
  rtx to, to_addr;
  int autinc_to, explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
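
/* Illustrative use of these predicates (all numbers hypothetical, since
   the ratios and maximum piece sizes are target-defined): on a target
   with MOVE_MAX_PIECES == 4 and MOVE_RATIO == 5, a 16-byte word-aligned
   copy needs four SImode moves, so

     MOVE_BY_PIECES_P (16, 32)

   is true (4 < 5) and the copy is expanded inline piece by piece rather
   than through a movmem pattern or a libcall.  */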
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat, mem, mem1;
  rtx reg;
  int num_clobbers;
  enum machine_mode mode;
  int regno;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
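
/* For example, a caller widening a SImode pseudo into a DImode pseudo
   with zero-extension (UNSIGNEDP nonzero) might write the following
   illustrative sketch, where both pseudos are hypothetical:

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);

   With UNSIGNEDP zero, the same call would emit a sign-extension.  */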
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode);
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode,
						 from_mode),
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode),
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
   /* Make sure both are fixed-point modes or both are not.  */
   gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
   if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode,
			     from_mode) != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
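
/* For example, to narrow a known-SImode value X to HImode (illustrative
   sketch; X is hypothetical):

     rtx narrowed = convert_modes (HImode, SImode, x, 0);

   convert_to_mode above is simply this with OLDMODE == VOIDmode, letting
   the mode of X speak for itself.  */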
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = uhwi_to_double_int (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
	val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
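
/* As an illustrative sketch, copying 9 bytes and asking for the
   mempcpy-style result (TO and FROM are hypothetical BLKmode MEMs):

     rtx end = move_by_pieces (to, from, 9, 32, 1);

   END then addresses the byte just past the last one written, while
   ENDP == 0 would simply return TO.  */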
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode, from_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode,
					     plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode,
					   plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
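
/* For instance, with 4-byte pieces available (MOVE_MAX_PIECES == 4,
   hypothetical) and word alignment, L == 15 decomposes greedily into
   3 SImode + 1 HImode + 1 QImode moves, so this returns 5.  */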
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode);
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
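
/* A typical call, copying a 32-byte aggregate (illustrative sketch;
   X and Y are hypothetical BLKmode MEMs):

     emit_block_move_hints (x, y, GEN_INT (32), BLOCK_OP_NORMAL, 0, -1);

   Passing 0 and -1 for the hints, as emit_block_move below does, means
   no extra alignment or size knowledge is claimed.  */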
rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  gcc_assert (size);
  if (CONST_INT_P (size)
      && INTVAL (size) == 0)
    return 0;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  gcc_assert (MEM_P (x) && MEM_P (y));
  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (&args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = direct_optab_handler (movmem_optab, mode);
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align
						 / BITS_PER_UNIT),
					GEN_INT (expected_size));

	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
  enum machine_mode y_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
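
/* Such a group might describe a 16-byte struct returned in two DImode
   registers (illustrative RTL; the register numbers are hypothetical):

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   gen_group_rtx clones this shape with fresh pseudos in place of
   regs 3 and 4.  */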
rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */
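
/* An illustrative sketch: loading a 16-byte BLKmode MEM into a
   PARALLEL like the two-register example shown above (SRC_MEM,
   DST_PARALLEL and TYPE are hypothetical):

     emit_group_load (dst_parallel, src_mem, type, 16);

   Each piece is extracted at the byte offset recorded in its
   EXPR_LIST entry.  */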
void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode),
					    0);
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode),
					    0);
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}

      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = adj_bytelen;
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */
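
/* An illustrative sketch for a function whose BLKmode return value
   arrives in SRCREG (SRCREG and RETURN_TYPE are hypothetical):

     rtx blk = copy_blkmode_from_reg (NULL_RTX, srcreg, return_type);

   Passing a null TGTBLK makes the routine allocate and return a
   suitably typed stack temporary.  */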
2092 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2094 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2095 rtx src = NULL, dst = NULL;
2096 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2097 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2098 enum machine_mode copy_mode;
2102 tgtblk = assign_temp (build_qualified_type (type,
2104 | TYPE_QUAL_CONST)),
2106 preserve_temp_slots (tgtblk);
2109 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2110 into a new pseudo which is a full word. */
2112 if (GET_MODE (srcreg) != BLKmode
2113 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2114 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2116 /* If the structure doesn't take up a whole number of words, see whether
2117 SRCREG is padded on the left or on the right. If it's on the left,
2118 set PADDING_CORRECTION to the number of bits to skip.
2120 In most ABIs, the structure will be returned at the least significant end of
2121 the register, which translates to right padding on little-endian
2122 targets and left padding on big-endian targets. The opposite
2123 holds if the structure is returned at the most significant
2124 end of the register. */
2125 if (bytes % UNITS_PER_WORD != 0
2126 && (targetm.calls.return_in_msb (type)
2128 : BYTES_BIG_ENDIAN))
2130 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2132 /* Copy the structure BITSIZE bits at a time. If the target lives in
2133 memory, take care of not reading/writing past its end by selecting
2134 a copy mode suited to BITSIZE. This should always be possible given how it is computed.
2137 We could probably emit more efficient code for machines which do not use
2138 strict alignment, but it doesn't seem worth the effort at the current time. */
2141 copy_mode = word_mode;
2144 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2145 if (mem_mode != BLKmode)
2146 copy_mode = mem_mode;
2149 for (bitpos = 0, xbitpos = padding_correction;
2150 bitpos < bytes * BITS_PER_UNIT;
2151 bitpos += bitsize, xbitpos += bitsize)
2153 /* We need a new source operand each time xbitpos is on a
2154 word boundary and when xbitpos == padding_correction
2155 (the first time through). */
2156 if (xbitpos % BITS_PER_WORD == 0
2157 || xbitpos == padding_correction)
2158 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2161 /* We need a new destination operand each time bitpos is on a word boundary. */
2163 if (bitpos % BITS_PER_WORD == 0)
2164 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2166 /* Use xbitpos for the source extraction (right justified) and
2167 bitpos for the destination store (left justified). */
2168 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2169 extract_bit_field (src, bitsize,
2170 xbitpos % BITS_PER_WORD, 1,
2171 NULL_RTX, copy_mode, copy_mode));
2177 /* Add a USE expression for REG to the (possibly empty) list pointed
2178 to by CALL_FUSAGE. REG must denote a hard register. */
2181 use_reg (rtx *call_fusage, rtx reg)
2183 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2186 = gen_rtx_EXPR_LIST (VOIDmode,
2187 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2190 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2191 starting at REGNO. All of these registers must be hard registers. */
2194 use_regs (rtx *call_fusage, int regno, int nregs)
2198 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2200 for (i = 0; i < nregs; i++)
2201 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2204 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2205 PARALLEL REGS. This is for calls that pass values in multiple
2206 non-contiguous locations. The Irix 6 ABI has examples of this. */
2209 use_group_regs (rtx *call_fusage, rtx regs)
2213 for (i = 0; i < XVECLEN (regs, 0); i++)
2215 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2217 /* A NULL entry means the parameter goes both on the stack and in
2218 registers. This can also be a MEM for targets that pass values
2219 partially on the stack and partially in registers. */
2220 if (reg != 0 && REG_P (reg))
2221 use_reg (call_fusage, reg);
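/* Usage sketch (hypothetical caller): to record that a call reads an
   argument from hard register 3, the call expander does

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 3));

   and later hangs CALL_FUSAGE off the CALL_INSN, so that data-flow
   passes see the register as used by the call. */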
2225 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2226 assignment and the code of the expression on the RHS is CODE. Return NULL otherwise. */
2230 get_def_for_expr (tree name, enum tree_code code)
2234 if (TREE_CODE (name) != SSA_NAME)
2237 def_stmt = get_gimple_for_ssa_name (name);
2239 || gimple_assign_rhs_code (def_stmt) != code)
2246 /* Determine whether the LEN bytes generated by CONSTFUN can be
2247 stored to memory using several move instructions. CONSTFUNDATA is
2248 a pointer which will be passed as argument in every CONSTFUN call.
2249 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2250 a memset operation and false if it's a copy of a constant string.
2251 Return nonzero if a call to store_by_pieces should succeed. */
2254 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2255 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2256 void *constfundata, unsigned int align, bool memsetp)
2258 unsigned HOST_WIDE_INT l;
2259 unsigned int max_size;
2260 HOST_WIDE_INT offset = 0;
2261 enum machine_mode mode, tmode;
2262 enum insn_code icode;
2270 ? SET_BY_PIECES_P (len, align)
2271 : STORE_BY_PIECES_P (len, align)))
2274 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2275 if (align >= GET_MODE_ALIGNMENT (tmode))
2276 align = GET_MODE_ALIGNMENT (tmode);
2279 enum machine_mode xmode;
2281 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2283 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2284 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2285 || SLOW_UNALIGNED_ACCESS (tmode, align))
2288 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2291 /* We would first store what we can in the largest integer mode, then go to
2292 successively smaller modes. */
2295 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2300 max_size = STORE_MAX_PIECES + 1;
2301 while (max_size > 1)
2303 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2304 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2305 if (GET_MODE_SIZE (tmode) < max_size)
2308 if (mode == VOIDmode)
2311 icode = optab_handler (mov_optab, mode);
2312 if (icode != CODE_FOR_nothing
2313 && align >= GET_MODE_ALIGNMENT (mode))
2315 unsigned int size = GET_MODE_SIZE (mode);
2322 cst = (*constfun) (constfundata, offset, mode);
2323 if (!LEGITIMATE_CONSTANT_P (cst))
2333 max_size = GET_MODE_SIZE (mode);
2336 /* The code above should have handled everything. */
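/* Illustration: a typical CONSTFUN reads bytes out of a constant
   string, along the lines of the callbacks used by the string
   builtins (a sketch, not the actual source; READ_STR_PIECE is a
   hypothetical name):

     static rtx
     read_str_piece (void *data, HOST_WIDE_INT offset,
                     enum machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   can_store_by_pieces (len, read_str_piece, (void *) str, align, false)
   then decides, without emitting anything, whether store_by_pieces
   would succeed. */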
2343 /* Generate several move instructions to store LEN bytes generated by
2344 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2345 pointer which will be passed as argument in every CONSTFUN call.
2346 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2347 a memset operation and false if it's a copy of a constant string.
2348 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, a la
2349 mempcpy; and if ENDP is 2 return memory at the end minus one byte, a la stpcpy. */
2353 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2354 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2355 void *constfundata, unsigned int align, bool memsetp, int endp)
2357 enum machine_mode to_addr_mode
2358 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2359 struct store_by_pieces_d data;
2363 gcc_assert (endp != 2);
2368 ? SET_BY_PIECES_P (len, align)
2369 : STORE_BY_PIECES_P (len, align));
2370 data.constfun = constfun;
2371 data.constfundata = constfundata;
2374 store_by_pieces_1 (&data, align);
2379 gcc_assert (!data.reverse);
2384 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2385 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2387 data.to_addr = copy_to_mode_reg (to_addr_mode,
2388 plus_constant (data.to_addr,
2391 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2398 to1 = adjust_address (data.to, QImode, data.offset);
2406 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2407 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2410 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2412 struct store_by_pieces_d data;
2417 data.constfun = clear_by_pieces_1;
2418 data.constfundata = NULL;
2421 store_by_pieces_1 (&data, align);
2424 /* Callback routine for clear_by_pieces.
2425 Return const0_rtx unconditionally. */
2428 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2429 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2430 enum machine_mode mode ATTRIBUTE_UNUSED)
2435 /* Subroutine of clear_by_pieces and store_by_pieces.
2436 Generate several move instructions to store LEN bytes of block TO. (A MEM
2437 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2440 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2441 unsigned int align ATTRIBUTE_UNUSED)
2443 enum machine_mode to_addr_mode
2444 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2445 rtx to_addr = XEXP (data->to, 0);
2446 unsigned int max_size = STORE_MAX_PIECES + 1;
2447 enum machine_mode mode = VOIDmode, tmode;
2448 enum insn_code icode;
2451 data->to_addr = to_addr;
2453 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2454 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2456 data->explicit_inc_to = 0;
2458 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2460 data->offset = data->len;
2462 /* If storing requires more than two move insns,
2463 copy addresses to registers (to make displacements shorter)
2464 and use post-increment if available. */
2465 if (!data->autinc_to
2466 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2468 /* Determine the main mode we'll be using. */
2469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2471 if (GET_MODE_SIZE (tmode) < max_size)
2474 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2476 data->to_addr = copy_to_mode_reg (to_addr_mode,
2477 plus_constant (to_addr, data->len));
2478 data->autinc_to = 1;
2479 data->explicit_inc_to = -1;
2482 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2483 && ! data->autinc_to)
2485 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2486 data->autinc_to = 1;
2487 data->explicit_inc_to = 1;
2490 if (!data->autinc_to && CONSTANT_P (to_addr))
2491 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2494 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2495 if (align >= GET_MODE_ALIGNMENT (tmode))
2496 align = GET_MODE_ALIGNMENT (tmode);
2499 enum machine_mode xmode;
2501 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2503 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2504 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2505 || SLOW_UNALIGNED_ACCESS (tmode, align))
2508 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2511 /* First store what we can in the largest integer mode, then go to
2512 successively smaller modes. */
2514 while (max_size > 1)
2516 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2517 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2518 if (GET_MODE_SIZE (tmode) < max_size)
2521 if (mode == VOIDmode)
2524 icode = optab_handler (mov_optab, mode);
2525 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2526 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2528 max_size = GET_MODE_SIZE (mode);
2531 /* The code above should have handled everything. */
2532 gcc_assert (!data->len);
2535 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2536 with move instructions for mode MODE. GENFUN is the gen_... function
2537 to make a move insn for that mode. DATA has all the other info. */
2540 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2541 struct store_by_pieces_d *data)
2543 unsigned int size = GET_MODE_SIZE (mode);
2546 while (data->len >= size)
2549 data->offset -= size;
2551 if (data->autinc_to)
2552 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2555 to1 = adjust_address (data->to, mode, data->offset);
2557 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2558 emit_insn (gen_add2_insn (data->to_addr,
2559 GEN_INT (-(HOST_WIDE_INT) size)));
2561 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2562 emit_insn ((*genfun) (to1, cst));
2564 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2565 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2567 if (! data->reverse)
2568 data->offset += size;
2574 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2575 its length in bytes. */
2578 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2579 unsigned int expected_align, HOST_WIDE_INT expected_size)
2581 enum machine_mode mode = GET_MODE (object);
2584 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2586 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2587 just move a zero. Otherwise, do this a piece at a time. */
2589 && CONST_INT_P (size)
2590 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2592 rtx zero = CONST0_RTX (mode);
2595 emit_move_insn (object, zero);
2599 if (COMPLEX_MODE_P (mode))
2601 zero = CONST0_RTX (GET_MODE_INNER (mode));
2604 write_complex_part (object, zero, 0);
2605 write_complex_part (object, zero, 1);
2611 if (size == const0_rtx)
2614 align = MEM_ALIGN (object);
2616 if (CONST_INT_P (size)
2617 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2618 clear_by_pieces (object, INTVAL (size), align);
2619 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2620 expected_align, expected_size))
2622 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2623 return set_storage_via_libcall (object, size, const0_rtx,
2624 method == BLOCK_OP_TAILCALL);
2632 clear_storage (rtx object, rtx size, enum block_op_methods method)
2634 return clear_storage_hints (object, size, method, 0, -1);
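/* Usage sketch: zeroing a 32-byte BLKmode MEM named OBJ is simply

     clear_storage (obj, GEN_INT (32), BLOCK_OP_NORMAL);

   clear_storage_hints additionally lets a caller that knows better
   (e.g. from value profiling) pass an expected alignment and size
   down to the setmem expander. */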
2638 /* A subroutine of clear_storage. Expand a call to memset.
2639 Return the return value of memset, 0 otherwise. */
2642 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2644 tree call_expr, fn, object_tree, size_tree, val_tree;
2645 enum machine_mode size_mode;
2648 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2649 place those new pseudos into a VAR_DECL and use them later. */
2651 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2653 size_mode = TYPE_MODE (sizetype);
2654 size = convert_to_mode (size_mode, size, 1);
2655 size = copy_to_mode_reg (size_mode, size);
2657 /* It is incorrect to use the libcall calling conventions to call
2658 memset in this context. This could be a user call to memset and
2659 the user may wish to examine the return value from memset. For
2660 targets where libcalls and normal calls have different conventions
2661 for returning pointers, we could end up generating incorrect code. */
2663 object_tree = make_tree (ptr_type_node, object);
2664 if (!CONST_INT_P (val))
2665 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2666 size_tree = make_tree (sizetype, size);
2667 val_tree = make_tree (integer_type_node, val);
2669 fn = clear_storage_libcall_fn (true);
2670 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2671 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2673 retval = expand_normal (call_expr);
2678 /* A subroutine of set_storage_via_libcall. Create the tree node
2679 for the function we use for block clears. The first time FOR_CALL
2680 is true, we call assemble_external. */
2682 tree block_clear_fn;
2685 init_block_clear_fn (const char *asmspec)
2687 if (!block_clear_fn)
2691 fn = get_identifier ("memset");
2692 args = build_function_type_list (ptr_type_node, ptr_type_node,
2693 integer_type_node, sizetype,
2696 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2697 DECL_EXTERNAL (fn) = 1;
2698 TREE_PUBLIC (fn) = 1;
2699 DECL_ARTIFICIAL (fn) = 1;
2700 TREE_NOTHROW (fn) = 1;
2701 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2702 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2704 block_clear_fn = fn;
2708 set_user_assembler_name (block_clear_fn, asmspec);
2712 clear_storage_libcall_fn (int for_call)
2714 static bool emitted_extern;
2716 if (!block_clear_fn)
2717 init_block_clear_fn (NULL);
2719 if (for_call && !emitted_extern)
2721 emitted_extern = true;
2722 make_decl_rtl (block_clear_fn);
2723 assemble_external (block_clear_fn);
2726 return block_clear_fn;
2729 /* Expand a setmem pattern; return true if successful. */
2732 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2733 unsigned int expected_align, HOST_WIDE_INT expected_size)
2735 /* Try the most limited insn first, because there's no point
2736 including more than one in the machine description unless
2737 the more limited one has some advantage. */
2739 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2740 enum machine_mode mode;
2742 if (expected_align < align)
2743 expected_align = align;
2745 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2746 mode = GET_MODE_WIDER_MODE (mode))
2748 enum insn_code code = direct_optab_handler (setmem_optab, mode);
2749 insn_operand_predicate_fn pred;
2751 if (code != CODE_FOR_nothing
2752 /* We don't need MODE to be narrower than
2753 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2754 the mode mask, as it is returned by the macro, it will
2755 definitely be less than the actual mode mask. */
2756 && ((CONST_INT_P (size)
2757 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2758 <= (GET_MODE_MASK (mode) >> 1)))
2759 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2760 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2761 || (*pred) (object, BLKmode))
2762 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2763 || (*pred) (opalign, VOIDmode)))
2766 enum machine_mode char_mode;
2767 rtx last = get_last_insn ();
2770 opsize = convert_to_mode (mode, size, 1);
2771 pred = insn_data[(int) code].operand[1].predicate;
2772 if (pred != 0 && ! (*pred) (opsize, mode))
2773 opsize = copy_to_mode_reg (mode, opsize);
2776 char_mode = insn_data[(int) code].operand[2].mode;
2777 if (char_mode != VOIDmode)
2779 opchar = convert_to_mode (char_mode, opchar, 1);
2780 pred = insn_data[(int) code].operand[2].predicate;
2781 if (pred != 0 && ! (*pred) (opchar, char_mode))
2782 opchar = copy_to_mode_reg (char_mode, opchar);
2785 if (insn_data[(int) code].n_operands == 4)
2786 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2788 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2789 GEN_INT (expected_align
2791 GEN_INT (expected_size));
2798 delete_insns_since (last);
2806 /* Write to one of the components of the complex value CPLX. Write VAL to
2807 the real part if IMAG_P is false, and the imaginary part if it's true. */
2810 write_complex_part (rtx cplx, rtx val, bool imag_p)
2812 enum machine_mode cmode;
2813 enum machine_mode imode;
2816 if (GET_CODE (cplx) == CONCAT)
2818 emit_move_insn (XEXP (cplx, imag_p), val);
2822 cmode = GET_MODE (cplx);
2823 imode = GET_MODE_INNER (cmode);
2824 ibitsize = GET_MODE_BITSIZE (imode);
2826 /* For MEMs simplify_gen_subreg may generate an invalid new address
2827 because, e.g., the original address is considered mode-dependent
2828 by the target, which restricts simplify_subreg from invoking
2829 adjust_address_nv. Instead of preparing fallback support for an
2830 invalid address, we call adjust_address_nv directly. */
2833 emit_move_insn (adjust_address_nv (cplx, imode,
2834 imag_p ? GET_MODE_SIZE (imode) : 0),
2839 /* If the sub-object is at least word sized, then we know that subregging
2840 will work. This special case is important, since store_bit_field
2841 wants to operate on integer modes, and there's rarely an OImode to
2842 correspond to TCmode. */
2843 if (ibitsize >= BITS_PER_WORD
2844 /* For hard regs we have exact predicates. Assume we can split
2845 the original object if it spans an even number of hard regs.
2846 This special case is important for SCmode on 64-bit platforms
2847 where the natural size of floating-point regs is 32-bit. */
2849 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2850 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2852 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2853 imag_p ? GET_MODE_SIZE (imode) : 0);
2856 emit_move_insn (part, val);
2860 /* simplify_gen_subreg may fail for sub-word MEMs. */
2861 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2864 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2867 /* Extract one of the components of the complex value CPLX. Extract the
2868 real part if IMAG_P is false, and the imaginary part if it's true. */
2871 read_complex_part (rtx cplx, bool imag_p)
2873 enum machine_mode cmode, imode;
2876 if (GET_CODE (cplx) == CONCAT)
2877 return XEXP (cplx, imag_p);
2879 cmode = GET_MODE (cplx);
2880 imode = GET_MODE_INNER (cmode);
2881 ibitsize = GET_MODE_BITSIZE (imode);
2883 /* Special case reads from complex constants that got spilled to memory. */
2884 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2886 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2887 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2889 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2890 if (CONSTANT_CLASS_P (part))
2891 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2895 /* For MEMs simplify_gen_subreg may generate an invalid new address
2896 because, e.g., the original address is considered mode-dependent
2897 by the target, which restricts simplify_subreg from invoking
2898 adjust_address_nv. Instead of preparing fallback support for an
2899 invalid address, we call adjust_address_nv directly. */
2901 return adjust_address_nv (cplx, imode,
2902 imag_p ? GET_MODE_SIZE (imode) : 0);
2904 /* If the sub-object is at least word sized, then we know that subregging
2905 will work. This special case is important, since extract_bit_field
2906 wants to operate on integer modes, and there's rarely an OImode to
2907 correspond to TCmode. */
2908 if (ibitsize >= BITS_PER_WORD
2909 /* For hard regs we have exact predicates. Assume we can split
2910 the original object if it spans an even number of hard regs.
2911 This special case is important for SCmode on 64-bit platforms
2912 where the natural size of floating-point regs is 32-bit. */
2914 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2915 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2917 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2918 imag_p ? GET_MODE_SIZE (imode) : 0);
2922 /* simplify_gen_subreg may fail for sub-word MEMs. */
2923 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2926 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2927 true, NULL_RTX, imode, imode);
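/* Usage sketch: together the two helpers let a caller rewrite one half
   of a complex value, e.g. conjugating a DCmode operand (hypothetical
   snippet):

     rtx im = read_complex_part (cplx, true);
     im = expand_unop (DFmode, neg_optab, im, NULL_RTX, 0);
     write_complex_part (cplx, im, true);

   whatever representation CPLX has (CONCAT, register, or MEM), the
   helpers pick the matching access method. */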
2930 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2931 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2932 represented in NEW_MODE. If FORCE is true, this will never happen, as
2933 we'll force-create a SUBREG if needed. */
2936 emit_move_change_mode (enum machine_mode new_mode,
2937 enum machine_mode old_mode, rtx x, bool force)
2941 if (push_operand (x, GET_MODE (x)))
2943 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2944 MEM_COPY_ATTRIBUTES (ret, x);
2948 /* We don't have to worry about changing the address since the
2949 size in bytes is supposed to be the same. */
2950 if (reload_in_progress)
2952 /* Copy the MEM to change the mode and move any
2953 substitutions from the old MEM to the new one. */
2954 ret = adjust_address_nv (x, new_mode, 0);
2955 copy_replacements (x, ret);
2958 ret = adjust_address (x, new_mode, 0);
2962 /* Note that we do want simplify_subreg's behavior of validating
2963 that the new mode is ok for a hard register. If we were to use
2964 simplify_gen_subreg, we would create the subreg, but would
2965 probably run into the target not being able to implement it. */
2966 /* Except, of course, when FORCE is true, when this is exactly what
2967 we want, which is needed for CCmodes on some targets. */
2969 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2971 ret = simplify_subreg (new_mode, x, old_mode, 0);
2977 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2978 an integer mode of the same size as MODE. Returns the instruction
2979 emitted, or NULL if such a move could not be generated. */
2982 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2984 enum machine_mode imode;
2985 enum insn_code code;
2987 /* There must exist a mode of the exact size we require. */
2988 imode = int_mode_for_mode (mode);
2989 if (imode == BLKmode)
2992 /* The target must support moves in this mode. */
2993 code = optab_handler (mov_optab, imode);
2994 if (code == CODE_FOR_nothing)
2997 x = emit_move_change_mode (imode, mode, x, force);
3000 y = emit_move_change_mode (imode, mode, y, force);
3003 return emit_insn (GEN_FCN (code) (x, y));
3006 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3007 Return an equivalent MEM that does not use an auto-increment. */
3010 emit_move_resolve_push (enum machine_mode mode, rtx x)
3012 enum rtx_code code = GET_CODE (XEXP (x, 0));
3013 HOST_WIDE_INT adjust;
3016 adjust = GET_MODE_SIZE (mode);
3017 #ifdef PUSH_ROUNDING
3018 adjust = PUSH_ROUNDING (adjust);
3020 if (code == PRE_DEC || code == POST_DEC)
3022 else if (code == PRE_MODIFY || code == POST_MODIFY)
3024 rtx expr = XEXP (XEXP (x, 0), 1);
3027 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3028 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3029 val = INTVAL (XEXP (expr, 1));
3030 if (GET_CODE (expr) == MINUS)
3032 gcc_assert (adjust == val || adjust == -val);
3036 /* Do not use anti_adjust_stack, since we don't want to update
3037 stack_pointer_delta. */
3038 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3039 GEN_INT (adjust), stack_pointer_rtx,
3040 0, OPTAB_LIB_WIDEN);
3041 if (temp != stack_pointer_rtx)
3042 emit_move_insn (stack_pointer_rtx, temp);
3049 temp = stack_pointer_rtx;
3054 temp = plus_constant (stack_pointer_rtx, -adjust);
3060 return replace_equiv_address (x, temp);
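/* Worked example (illustrative): for a 4-byte MODE on a
   downward-growing stack, X of the form (mem:SI (pre_dec (reg sp)))
   is resolved by the code above into an explicit adjustment

     sp = sp + (-4);

   followed by returning (mem:SI (reg sp)), i.e. the auto-increment is
   peeled off into a separate stack adjustment that deliberately
   bypasses anti_adjust_stack. */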
3063 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3064 X is known to satisfy push_operand, and MODE is known to be complex.
3065 Returns the last instruction emitted. */
3068 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3070 enum machine_mode submode = GET_MODE_INNER (mode);
3073 #ifdef PUSH_ROUNDING
3074 unsigned int submodesize = GET_MODE_SIZE (submode);
3076 /* If we push to the stack but the size is smaller than what the
3077 machine can push exactly, we need to use move instructions. */
3078 if (PUSH_ROUNDING (submodesize) != submodesize)
3080 x = emit_move_resolve_push (mode, x);
3081 return emit_move_insn (x, y);
3085 /* Note that the real part always precedes the imag part in memory
3086 regardless of the machine's endianness. */
3087 switch (GET_CODE (XEXP (x, 0)))
3101 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3102 read_complex_part (y, imag_first));
3103 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3104 read_complex_part (y, !imag_first));
3107 /* A subroutine of emit_move_complex. Perform the move from Y to X
3108 via two moves of the parts. Returns the last instruction emitted. */
3111 emit_move_complex_parts (rtx x, rtx y)
3113 /* Show the output dies here. This is necessary for SUBREGs
3114 of pseudos since we cannot track their lifetimes correctly;
3115 hard regs shouldn't appear here except as return values. */
3116 if (!reload_completed && !reload_in_progress
3117 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3120 write_complex_part (x, read_complex_part (y, false), false);
3121 write_complex_part (x, read_complex_part (y, true), true);
3123 return get_last_insn ();
3126 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3127 MODE is known to be complex. Returns the last instruction emitted. */
3130 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3134 /* Need to take special care for pushes, to maintain proper ordering
3135 of the data, and possibly extra padding. */
3136 if (push_operand (x, mode))
3137 return emit_move_complex_push (mode, x, y);
3139 /* See if we can coerce the target into moving both values at once. */
3141 /* Move floating point as parts. */
3142 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3143 && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3145 /* Not possible if the values are inherently not adjacent. */
3146 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3148 /* Is possible if both are registers (or subregs of registers). */
3149 else if (register_operand (x, mode) && register_operand (y, mode))
3151 /* If one of the operands is a memory, and alignment constraints
3152 are friendly enough, we may be able to do combined memory operations.
3153 We do not attempt this if Y is a constant because that combination is
3154 usually better with the by-parts thing below. */
3155 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3156 && (!STRICT_ALIGNMENT
3157 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3166 /* For memory to memory moves, optimal behavior can be had with the
3167 existing block move logic. */
3168 if (MEM_P (x) && MEM_P (y))
3170 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3171 BLOCK_OP_NO_LIBCALL);
3172 return get_last_insn ();
3175 ret = emit_move_via_integer (mode, x, y, true);
3180 return emit_move_complex_parts (x, y);
3183 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3184 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3187 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3191 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3194 enum insn_code code = optab_handler (mov_optab, CCmode);
3195 if (code != CODE_FOR_nothing)
3197 x = emit_move_change_mode (CCmode, mode, x, true);
3198 y = emit_move_change_mode (CCmode, mode, y, true);
3199 return emit_insn (GEN_FCN (code) (x, y));
3203 /* Otherwise, find the MODE_INT mode of the same width. */
3204 ret = emit_move_via_integer (mode, x, y, false);
3205 gcc_assert (ret != NULL);
3209 /* Return true if word I of OP lies entirely in the
3210 undefined bits of a paradoxical subreg. */
3213 undefined_operand_subword_p (const_rtx op, int i)
3215 enum machine_mode innermode, innermostmode;
3217 if (GET_CODE (op) != SUBREG)
3219 innermode = GET_MODE (op);
3220 innermostmode = GET_MODE (SUBREG_REG (op));
3221 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3222 /* The SUBREG_BYTE represents offset, as if the value were stored in
3223 memory, except for a paradoxical subreg where we define
3224 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3226 if (SUBREG_BYTE (op) == 0
3227 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3229 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3230 if (WORDS_BIG_ENDIAN)
3231 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3232 if (BYTES_BIG_ENDIAN)
3233 offset += difference % UNITS_PER_WORD;
3235 if (offset >= GET_MODE_SIZE (innermostmode)
3236 || offset <= -GET_MODE_SIZE (word_mode))
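/* Example (illustrative): on a little-endian 64-bit target, word 1 of
   the paradoxical (subreg:TI (reg:DI r) 0) computes offset == 8, which
   is past the 8-byte DImode source, so the function returns true and
   the caller may skip the move for that word entirely. */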
3241 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3242 MODE is any multi-word or full-word mode that lacks a move_insn
3243 pattern. Note that you will get better code if you define such
3244 patterns, even if they must turn into multiple assembler instructions. */
3247 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3254 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3256 /* If X is a push on the stack, do the push now and replace
3257 X with a reference to the stack pointer. */
3258 if (push_operand (x, mode))
3259 x = emit_move_resolve_push (mode, x);
3261 /* If we are in reload, see if either operand is a MEM whose address
3262 is scheduled for replacement. */
3263 if (reload_in_progress && MEM_P (x)
3264 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3265 x = replace_equiv_address_nv (x, inner);
3266 if (reload_in_progress && MEM_P (y)
3267 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3268 y = replace_equiv_address_nv (y, inner);
3272 need_clobber = false;
3274 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3277 rtx xpart = operand_subword (x, i, 1, mode);
3280 /* Do not generate code for a move if it would come entirely
3281 from the undefined bits of a paradoxical subreg. */
3282 if (undefined_operand_subword_p (y, i))
3285 ypart = operand_subword (y, i, 1, mode);
3287 /* If we can't get a part of Y, put Y into memory if it is a
3288 constant. Otherwise, force it into a register. Then we must
3289 be able to get a part of Y. */
3290 if (ypart == 0 && CONSTANT_P (y))
3292 y = use_anchored_address (force_const_mem (mode, y));
3293 ypart = operand_subword (y, i, 1, mode);
3295 else if (ypart == 0)
3296 ypart = operand_subword_force (y, i, mode);
3298 gcc_assert (xpart && ypart);
3300 need_clobber |= (GET_CODE (xpart) == SUBREG);
3302 last_insn = emit_move_insn (xpart, ypart);
3308 /* Show the output dies here. This is necessary for SUBREGs
3309 of pseudos since we cannot track their lifetimes correctly;
3310 hard regs shouldn't appear here except as return values.
3311 We never want to emit such a clobber after reload. */
3313 && ! (reload_in_progress || reload_completed)
3314 && need_clobber != 0)
3322 /* Low level part of emit_move_insn.
3323 Called just like emit_move_insn, but assumes X and Y
3324 are basically valid. */
3327 emit_move_insn_1 (rtx x, rtx y)
3329 enum machine_mode mode = GET_MODE (x);
3330 enum insn_code code;
3332 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3334 code = optab_handler (mov_optab, mode);
3335 if (code != CODE_FOR_nothing)
3336 return emit_insn (GEN_FCN (code) (x, y));
3338 /* Expand complex moves by moving real part and imag part. */
3339 if (COMPLEX_MODE_P (mode))
3340 return emit_move_complex (mode, x, y);
3342 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3343 || ALL_FIXED_POINT_MODE_P (mode))
3345 rtx result = emit_move_via_integer (mode, x, y, true);
3347 /* If we can't find an integer mode, fall back to a multi-word move. */
3351 return emit_move_multi_word (mode, x, y);
3354 if (GET_MODE_CLASS (mode) == MODE_CC)
3355 return emit_move_ccmode (mode, x, y);
3357 /* Try using a move pattern for the corresponding integer mode. This is
3358 only safe when simplify_subreg can convert MODE constants into integer
3359 constants. At present, it can only do this reliably if the value
3360 fits within a HOST_WIDE_INT. */
3361 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3363 rtx ret = emit_move_via_integer (mode, x, y, false);
3368 return emit_move_multi_word (mode, x, y);
3371 /* Generate code to copy Y into X.
3372 Both Y and X must have the same mode, except that
3373 Y can be a constant with VOIDmode.
3374 This mode cannot be BLKmode; use emit_block_move for that.
3376 Return the last instruction emitted. */
3379 emit_move_insn (rtx x, rtx y)
3381 enum machine_mode mode = GET_MODE (x);
3382 rtx y_cst = NULL_RTX;
3385 gcc_assert (mode != BLKmode
3386 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3391 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3392 && (last_insn = compress_float_constant (x, y)))
3397 if (!LEGITIMATE_CONSTANT_P (y))
3399 y = force_const_mem (mode, y);
3401 /* If the target's cannot_force_const_mem prevented the spill,
3402 assume that the target's move expanders will also take care
3403 of the non-legitimate constant. */
3407 y = use_anchored_address (y);
3411 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3414 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3416 && ! push_operand (x, GET_MODE (x))))
3417 x = validize_mem (x);
3420 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3421 MEM_ADDR_SPACE (y)))
3422 y = validize_mem (y);
3424 gcc_assert (mode != BLKmode);
3426 last_insn = emit_move_insn_1 (x, y);
3428 if (y_cst && REG_P (x)
3429 && (set = single_set (last_insn)) != NULL_RTX
3430 && SET_DEST (set) == x
3431 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3432 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
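/* Usage sketch: the canonical client of this routine loads a value
   into a fresh pseudo, e.g.

     rtx reg = gen_reg_rtx (SImode);
     emit_move_insn (reg, GEN_INT (42));

   after operand validation, the actual strategy (plain move pattern,
   complex parts, CC mode, or multi-word) is chosen by
   emit_move_insn_1 above. */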
3437 /* If Y is representable exactly in a narrower mode, and the target can
3438 perform the extension directly from constant or memory, then emit the
3439 move as an extension. */
3442 compress_float_constant (rtx x, rtx y)
3444 enum machine_mode dstmode = GET_MODE (x);
3445 enum machine_mode orig_srcmode = GET_MODE (y);
3446 enum machine_mode srcmode;
3448 int oldcost, newcost;
3449 bool speed = optimize_insn_for_speed_p ();
3451 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3453 if (LEGITIMATE_CONSTANT_P (y))
3454 oldcost = rtx_cost (y, SET, speed);
3456 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3458 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3459 srcmode != orig_srcmode;
3460 srcmode = GET_MODE_WIDER_MODE (srcmode))
3463 rtx trunc_y, last_insn;
3465 /* Skip if the target can't extend this way. */
3466 ic = can_extend_p (dstmode, srcmode, 0);
3467 if (ic == CODE_FOR_nothing)
3470 /* Skip if the narrowed value isn't exact. */
3471 if (! exact_real_truncate (srcmode, &r))
3474 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3476 if (LEGITIMATE_CONSTANT_P (trunc_y))
3478 /* Skip if the target needs extra instructions to perform the extension. */
3480 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3482 /* This is valid, but may not be cheaper than the original. */
3483 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3484 if (oldcost < newcost)
3487 else if (float_extend_from_mem[dstmode][srcmode])
3489 trunc_y = force_const_mem (srcmode, trunc_y);
3490 /* This is valid, but may not be cheaper than the original. */
3491 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3492 if (oldcost < newcost)
3494 trunc_y = validize_mem (trunc_y);
3499 /* For CSE's benefit, force the compressed constant pool entry
3500 into a new pseudo. This constant may be used in different modes,
3501 and if not, combine will put things back together for us. */
3502 trunc_y = force_reg (srcmode, trunc_y);
3503 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3504 last_insn = get_last_insn ();
3507 set_unique_reg_note (last_insn, REG_EQUAL, y);
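/* Worked example (illustrative): for a DFmode constant 1.5, the value
   is exactly representable in SFmode, so on a target with a usable
   extendsfdf2 the loop above can replace the DFmode constant-pool
   load by

     (set (reg:DF x) (float_extend:DF (mem:SF (symbol_ref ...))))

   halving the constant-pool entry, provided rtx_cost does not report
   the extension as more expensive than the original load. */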
3515 /* Pushing data onto the stack. */
3517 /* Push a block of length SIZE (perhaps variable)
3518 and return an rtx to address the beginning of the block.
3519 The value may be virtual_outgoing_args_rtx.
3521 EXTRA is the number of bytes of padding to push in addition to SIZE.
3522 BELOW nonzero means this padding comes at low addresses;
3523 otherwise, the padding comes at high addresses. */
3526 push_block (rtx size, int extra, int below)
3530 size = convert_modes (Pmode, ptr_mode, size, 1);
3531 if (CONSTANT_P (size))
3532 anti_adjust_stack (plus_constant (size, extra));
3533 else if (REG_P (size) && extra == 0)
3534 anti_adjust_stack (size);
3537 temp = copy_to_mode_reg (Pmode, size);
3539 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3540 temp, 0, OPTAB_LIB_WIDEN);
3541 anti_adjust_stack (temp);
3544 #ifndef STACK_GROWS_DOWNWARD
3550 temp = virtual_outgoing_args_rtx;
3551 if (extra != 0 && below)
3552 temp = plus_constant (temp, extra);
3556 if (CONST_INT_P (size))
3557 temp = plus_constant (virtual_outgoing_args_rtx,
3558 -INTVAL (size) - (below ? 0 : extra));
3559 else if (extra != 0 && !below)
3560 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3561 negate_rtx (Pmode, plus_constant (size, extra)));
3563 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3564 negate_rtx (Pmode, size));
3567 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3570 #ifdef PUSH_ROUNDING
3572 /* Emit single push insn. */
3575 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3578 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3580 enum insn_code icode;
3581 insn_operand_predicate_fn pred;
3583 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3584 /* If there is a push pattern, use it. Otherwise try the old way of
3585 throwing a MEM representing the push operation to the move expander. */
3586 icode = optab_handler (push_optab, mode);
3587 if (icode != CODE_FOR_nothing)
3589 if (((pred = insn_data[(int) icode].operand[0].predicate)
3590 && !((*pred) (x, mode))))
3591 x = force_reg (mode, x);
3592 emit_insn (GEN_FCN (icode) (x));
3595 if (GET_MODE_SIZE (mode) == rounded_size)
3596 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3597 /* If we are to pad downward, adjust the stack pointer first and
3598 then store X into the stack location using an offset. This is
3599 because emit_move_insn does not know how to pad; it does not have access to type. */
3601 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3603 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3604 HOST_WIDE_INT offset;
3606 emit_move_insn (stack_pointer_rtx,
3607 expand_binop (Pmode,
3608 #ifdef STACK_GROWS_DOWNWARD
3614 GEN_INT (rounded_size),
3615 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3617 offset = (HOST_WIDE_INT) padding_size;
3618 #ifdef STACK_GROWS_DOWNWARD
3619 if (STACK_PUSH_CODE == POST_DEC)
3620 /* We have already decremented the stack pointer, so get the previous value. */
3622 offset += (HOST_WIDE_INT) rounded_size;
3624 if (STACK_PUSH_CODE == POST_INC)
3625 /* We have already incremented the stack pointer, so get the previous value. */
3627 offset -= (HOST_WIDE_INT) rounded_size;
3629 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3633 #ifdef STACK_GROWS_DOWNWARD
3634 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3635 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3636 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3638 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3639 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3640 GEN_INT (rounded_size));
3642 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3645 dest = gen_rtx_MEM (mode, dest_addr);
3649 set_mem_attributes (dest, type, 1);
3651 if (flag_optimize_sibling_calls)
3652 /* Function incoming arguments may overlap with sibling call
3653 outgoing arguments and we cannot allow reordering of reads
3654 from function arguments with stores to outgoing arguments
3655 of sibling calls. */
3656 set_mem_alias_set (dest, 0);
3658 emit_move_insn (dest, x);
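/* Worked example (illustrative): pushing an HImode value when
   PUSH_ROUNDING rounds 2 bytes up to 4 gives rounded_size == 4.
   With downward padding on a downward-growing stack, the code above
   first drops the stack pointer by 4 and then stores the value at
   offset 2 from the new stack pointer; POST_DEC targets instead fold
   the already-performed decrement into that offset. */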
3662 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3664 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3666 SIZE is an rtx for the size of data to be copied (in bytes),
3667 needed only if X is BLKmode.
3669 ALIGN (in bits) is maximum alignment we can assume.
3671 If PARTIAL and REG are both nonzero, then copy that many of the first
3672 bytes of X into registers starting with REG, and push the rest of X.
3673 The amount of space pushed is decreased by PARTIAL bytes.
3674 REG must be a hard register in this case.
3675 If REG is zero but PARTIAL is not, take all other actions for an
3676 argument partially in registers, but do not actually load any registers.
3679 EXTRA is the amount in bytes of extra space to leave next to this arg.
3680 This is ignored if an argument block has already been allocated.
3682 On a machine that lacks real push insns, ARGS_ADDR is the address of
3683 the bottom of the argument block for this call. We use indexing off there
3684 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3685 argument block has not been preallocated.
3687 ARGS_SO_FAR is the size of args previously pushed for this call.
3689 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3690 for arguments passed in registers. If nonzero, it will be the number
3691 of bytes required. */
3694 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3695 unsigned int align, int partial, rtx reg, int extra,
3696 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3700 enum direction stack_direction
3701 #ifdef STACK_GROWS_DOWNWARD
3707 /* Decide where to pad the argument: `downward' for below,
3708 `upward' for above, or `none' for don't pad it.
3709 Default is below for small data on big-endian machines; else above. */
3710 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3712 /* Invert direction if the stack is post-decrement. */
3714 if (STACK_PUSH_CODE == POST_DEC)
3715 if (where_pad != none)
3716 where_pad = (where_pad == downward ? upward : downward);
3721 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3723 /* Copy a block into the stack, entirely or partially. */
3730 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3731 used = partial - offset;
3733 if (mode != BLKmode)
3735 /* A value is to be stored in an insufficiently aligned
3736 stack slot; copy via a suitably aligned slot if necessary. */
3738 size = GEN_INT (GET_MODE_SIZE (mode));
3739 if (!MEM_P (xinner))
3741 temp = assign_temp (type, 0, 1, 1);
3742 emit_move_insn (temp, xinner);
3749 /* USED is now the # of bytes we need not copy to the stack
3750 because registers will take care of them. */
3753 xinner = adjust_address (xinner, BLKmode, used);
3755 /* If the partial register-part of the arg counts in its stack size,
3756 skip the part of stack space corresponding to the registers.
3757 Otherwise, start copying to the beginning of the stack space,
3758 by setting SKIP to 0. */
3759 skip = (reg_parm_stack_space == 0) ? 0 : used;
3761 #ifdef PUSH_ROUNDING
3762 /* Do it with several push insns if that doesn't take lots of insns
3763 and if there is no difficulty with push insns that skip bytes
3764 on the stack for alignment purposes. */
3767 && CONST_INT_P (size)
3769 && MEM_ALIGN (xinner) >= align
3770 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3771 /* Here we avoid the case of a structure whose weak alignment
3772 forces many pushes of a small amount of data,
3773 and such small pushes do rounding that causes trouble. */
3774 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3775 || align >= BIGGEST_ALIGNMENT
3776 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3777 == (align / BITS_PER_UNIT)))
3778 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3780 /* Push padding now if padding above and stack grows down,
3781 or if padding below and stack grows up.
3782 But if space already allocated, this has already been done. */
3783 if (extra && args_addr == 0
3784 && where_pad != none && where_pad != stack_direction)
3785 anti_adjust_stack (GEN_INT (extra));
3787 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3790 #endif /* PUSH_ROUNDING */
3794 /* Otherwise make space on the stack and copy the data
3795 to the address of that space. */
3797 /* Deduct words put into registers from the size we must copy. */
3800 if (CONST_INT_P (size))
3801 size = GEN_INT (INTVAL (size) - used);
3803 size = expand_binop (GET_MODE (size), sub_optab, size,
3804 GEN_INT (used), NULL_RTX, 0,
3808 /* Get the address of the stack space.
3809 In this case, we do not deal with EXTRA separately.
3810 A single stack adjust will do. */
3813 temp = push_block (size, extra, where_pad == downward);
3816 else if (CONST_INT_P (args_so_far))
3817 temp = memory_address (BLKmode,
3818 plus_constant (args_addr,
3819 skip + INTVAL (args_so_far)));
3821 temp = memory_address (BLKmode,
3822 plus_constant (gen_rtx_PLUS (Pmode,
3827 if (!ACCUMULATE_OUTGOING_ARGS)
3829 /* If the source is referenced relative to the stack pointer,
3830 copy it to another register to stabilize it. We do not need
3831 to do this if we know that we won't be changing sp. */
3833 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3834 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3835 temp = copy_to_reg (temp);
3838 target = gen_rtx_MEM (BLKmode, temp);
3840 /* We do *not* set_mem_attributes here, because incoming arguments
3841 may overlap with sibling call outgoing arguments and we cannot
3842 allow reordering of reads from function arguments with stores
3843 to outgoing arguments of sibling calls. We do, however, want
3844 to record the alignment of the stack slot. */
3845 /* ALIGN may well be better aligned than TYPE, e.g. due to
3846 PARM_BOUNDARY. Assume the caller isn't lying. */
3847 set_mem_align (target, align);
3849 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3852 else if (partial > 0)
3854 /* Scalar partly in registers. */
3856 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3859 /* # bytes of start of argument
3860 that we must make space for but need not store. */
3861 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3862 int args_offset = INTVAL (args_so_far);
3865 /* Push padding now if padding above and stack grows down,
3866 or if padding below and stack grows up.
3867 But if space already allocated, this has already been done. */
3868 if (extra && args_addr == 0
3869 && where_pad != none && where_pad != stack_direction)
3870 anti_adjust_stack (GEN_INT (extra));
3872 /* If we make space by pushing it, we might as well push
3873 the real data. Otherwise, we can leave OFFSET nonzero
3874 and leave the space uninitialized. */
3878 /* Now NOT_STACK gets the number of words that we don't need to
3879 allocate on the stack. Convert OFFSET to words too. */
3880 not_stack = (partial - offset) / UNITS_PER_WORD;
3881 offset /= UNITS_PER_WORD;
3883 /* If the partial register-part of the arg counts in its stack size,
3884 skip the part of stack space corresponding to the registers.
3885 Otherwise, start copying to the beginning of the stack space,
3886 by setting SKIP to 0. */
3887 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3889 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3890 x = validize_mem (force_const_mem (mode, x));
3892 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3893 SUBREGs of such registers are not allowed. */
3894 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3895 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3896 x = copy_to_reg (x);
3898 /* Loop over all the words allocated on the stack for this arg. */
3899 /* We can do it by words, because any scalar bigger than a word
3900 has a size that is a multiple of a word. */
3901 #ifndef PUSH_ARGS_REVERSED
3902 for (i = not_stack; i < size; i++)
3904 for (i = size - 1; i >= not_stack; i--)
3906 if (i >= not_stack + offset)
3907 emit_push_insn (operand_subword_force (x, i, mode),
3908 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3910 GEN_INT (args_offset + ((i - not_stack + skip)
3912 reg_parm_stack_space, alignment_pad);
3919 /* Push padding now if padding above and stack grows down,
3920 or if padding below and stack grows up.
3921 But if space already allocated, this has already been done. */
3922 if (extra && args_addr == 0
3923 && where_pad != none && where_pad != stack_direction)
3924 anti_adjust_stack (GEN_INT (extra));
3926 #ifdef PUSH_ROUNDING
3927 if (args_addr == 0 && PUSH_ARGS)
3928 emit_single_push_insn (mode, x, type);
3932 if (CONST_INT_P (args_so_far))
3934 = memory_address (mode,
3935 plus_constant (args_addr,
3936 INTVAL (args_so_far)));
3938 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3940 dest = gen_rtx_MEM (mode, addr);
3942 /* We do *not* set_mem_attributes here, because incoming arguments
3943 may overlap with sibling call outgoing arguments and we cannot
3944 allow reordering of reads from function arguments with stores
3945 to outgoing arguments of sibling calls. We do, however, want
3946 to record the alignment of the stack slot. */
3947 /* ALIGN may well be better aligned than TYPE, e.g. due to
3948 PARM_BOUNDARY. Assume the caller isn't lying. */
3949 set_mem_align (dest, align);
3951 emit_move_insn (dest, x);
3955 /* If part should go in registers, copy that part
3956 into the appropriate registers. Do this now, at the end,
3957 since mem-to-mem copies above may do function calls. */
3958 if (partial > 0 && reg != 0)
3960 /* Handle calls that pass values in multiple non-contiguous locations.
3961 The Irix 6 ABI has examples of this. */
3962 if (GET_CODE (reg) == PARALLEL)
3963 emit_group_load (reg, x, type, -1);
3966 gcc_assert (partial % UNITS_PER_WORD == 0);
3967 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3971 if (extra && args_addr == 0 && where_pad == stack_direction)
3972 anti_adjust_stack (GEN_INT (extra));
3974 if (alignment_pad && args_addr == 0)
3975 anti_adjust_stack (alignment_pad);
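/* Worked example (illustrative): a 12-byte argument with PARTIAL == 8
   on a 32-bit target has its first two words loaded into registers by
   the emit_group_load / move_block_to_reg calls at the end above, and
   only the last word is pushed; whether the register part also
   consumes stack space is controlled by REG_PARM_STACK_SPACE via
   SKIP. */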
3978 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3982 get_subtarget (rtx x)
3986 /* Only registers can be subtargets. */
3988 /* Don't use hard regs to avoid extending their life. */
3989 || REGNO (x) < FIRST_PSEUDO_REGISTER
3993 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3994 FIELD is a bitfield. Returns true if the optimization was successful,
3995 and there's nothing else to do. */
3998 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3999 unsigned HOST_WIDE_INT bitpos,
4000 enum machine_mode mode1, rtx str_rtx,
4003 enum machine_mode str_mode = GET_MODE (str_rtx);
4004 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4009 if (mode1 != VOIDmode
4010 || bitsize >= BITS_PER_WORD
4011 || str_bitsize > BITS_PER_WORD
4012 || TREE_SIDE_EFFECTS (to)
4013 || TREE_THIS_VOLATILE (to))
4017 if (!BINARY_CLASS_P (src)
4018 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4021 op0 = TREE_OPERAND (src, 0);
4022 op1 = TREE_OPERAND (src, 1);
4025 if (!operand_equal_p (to, op0, 0))
4028 if (MEM_P (str_rtx))
4030 unsigned HOST_WIDE_INT offset1;
4032 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4033 str_mode = word_mode;
4034 str_mode = get_best_mode (bitsize, bitpos,
4035 MEM_ALIGN (str_rtx), str_mode, 0);
4036 if (str_mode == VOIDmode)
4038 str_bitsize = GET_MODE_BITSIZE (str_mode);
4041 bitpos %= str_bitsize;
4042 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4043 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4045 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4048 /* If the bit field covers the whole REG/MEM, store_field
4049 will likely generate better code. */
4050 if (bitsize >= str_bitsize)
4053 /* We can't handle fields split across multiple entities. */
4054 if (bitpos + bitsize > str_bitsize)
4057 if (BYTES_BIG_ENDIAN)
4058 bitpos = str_bitsize - bitpos - bitsize;
4060 switch (TREE_CODE (src))
4064 /* For now, just optimize the case of the topmost bitfield
4065 where we don't need to do any masking and also
4066 1 bit bitfields where xor can be used.
4067 We might win by one instruction for the other bitfields
4068 too if insv/extv instructions aren't used, so that
4069 can be added later. */
4070 if (bitpos + bitsize != str_bitsize
4071 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4074 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4075 value = convert_modes (str_mode,
4076 TYPE_MODE (TREE_TYPE (op1)), value,
4077 TYPE_UNSIGNED (TREE_TYPE (op1)));
4079 /* We may be accessing data outside the field, which means
4080 we can alias adjacent data. */
4081 if (MEM_P (str_rtx))
4083 str_rtx = shallow_copy_rtx (str_rtx);
4084 set_mem_alias_set (str_rtx, 0);
4085 set_mem_expr (str_rtx, 0);
4088 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4089 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4091 value = expand_and (str_mode, value, const1_rtx, NULL);
4094 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4095 build_int_cst (NULL_TREE, bitpos),
4097 result = expand_binop (str_mode, binop, str_rtx,
4098 value, str_rtx, 1, OPTAB_WIDEN);
4099 if (result != str_rtx)
4100 emit_move_insn (str_rtx, result);
4105 if (TREE_CODE (op1) != INTEGER_CST)
4107 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4108 value = convert_modes (GET_MODE (str_rtx),
4109 TYPE_MODE (TREE_TYPE (op1)), value,
4110 TYPE_UNSIGNED (TREE_TYPE (op1)));
4112 /* We may be accessing data outside the field, which means
4113 we can alias adjacent data. */
4114 if (MEM_P (str_rtx))
4116 str_rtx = shallow_copy_rtx (str_rtx);
4117 set_mem_alias_set (str_rtx, 0);
4118 set_mem_expr (str_rtx, 0);
4121 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4122 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4124 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4126 value = expand_and (GET_MODE (str_rtx), value, mask,
4129 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4130 build_int_cst (NULL_TREE, bitpos),
4132 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4133 value, str_rtx, 1, OPTAB_WIDEN);
4134 if (result != str_rtx)
4135 emit_move_insn (str_rtx, result);
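/* Example (illustrative): for

     struct { unsigned f : 1; } *p;
     ...
     p->f ^= 1;

   OP1 is an INTEGER_CST, so the BIT_XOR_EXPR case above shifts the
   constant into the field's position and emits a single xor on the
   word containing the bit, avoiding an extract/modify/insert
   sequence. */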
4146 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4147 is true, try generating a nontemporal store. */
4150 expand_assignment (tree to, tree from, bool nontemporal)
4155 /* Don't crash if the lhs of the assignment was erroneous. */
4156 if (TREE_CODE (to) == ERROR_MARK)
4158 result = expand_normal (from);
4162 /* Optimize away no-op moves without side-effects. */
4163 if (operand_equal_p (to, from, 0))
4166 /* Assignment of a structure component needs special treatment
4167 if the structure component's rtx is not simply a MEM.
4168 Assignment of an array element at a constant index, and assignment of
4169 an array element in an unaligned packed structure field, has the same problem. */
4171 if (handled_component_p (to)
4172 /* ??? We only need to handle MEM_REF here if the access is not
4173 a full access of the base object. */
4174 || (TREE_CODE (to) == MEM_REF
4175 && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
4176 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4178 enum machine_mode mode1;
4179 HOST_WIDE_INT bitsize, bitpos;
4186 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4187 &unsignedp, &volatilep, true);
4189 /* If we are going to use store_bit_field and extract_bit_field,
4190 make sure to_rtx will be safe for multiple use. */
4192 to_rtx = expand_normal (tem);
4194 /* If the bitfield is volatile, we want to access it in the
4195 field's mode, not the computed mode. */
4197 && GET_CODE (to_rtx) == MEM
4198 && flag_strict_volatile_bitfields > 0)
4199 to_rtx = adjust_address (to_rtx, mode1, 0);
4203 enum machine_mode address_mode;
4206 if (!MEM_P (to_rtx))
4208 /* We can get constant negative offsets into arrays with broken
4209 user code. Translate this to a trap instead of ICEing. */
4210 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4211 expand_builtin_trap ();
4212 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4215 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4217 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4218 if (GET_MODE (offset_rtx) != address_mode)
4219 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4221 /* A constant address in TO_RTX can have VOIDmode; we must not try
4222 to call force_reg in that case. */
4224 && GET_MODE (to_rtx) == BLKmode
4225 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4227 && (bitpos % bitsize) == 0
4228 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4229 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4231 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4235 to_rtx = offset_address (to_rtx, offset_rtx,
4236 highest_pow2_factor_for_target (to,
4240 /* No action is needed if the target is not a memory and the field
4241 lies completely outside that target. This can occur if the source
4242 code contains an out-of-bounds access to a small array. */
4244 && GET_MODE (to_rtx) != BLKmode
4245 && (unsigned HOST_WIDE_INT) bitpos
4246 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4248 expand_normal (from);
4251 /* Handle expand_expr of a complex value returning a CONCAT. */
4252 else if (GET_CODE (to_rtx) == CONCAT)
4254 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4256 gcc_assert (bitpos == 0);
4257 result = store_expr (from, to_rtx, false, nontemporal);
4261 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4262 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4270 /* If the field is at offset zero, we could have been given the
4271 DECL_RTX of the parent struct. Don't munge it. */
4272 to_rtx = shallow_copy_rtx (to_rtx);
4274 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4276 /* Deal with volatile and readonly fields. The former is only
4277 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4279 MEM_VOLATILE_P (to_rtx) = 1;
4280 if (component_uses_parent_alias_set (to))
4281 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4284 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4288 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4289 TREE_TYPE (tem), get_alias_set (to),
4294 preserve_temp_slots (result);
4300 else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
4302 addr_space_t as = ADDR_SPACE_GENERIC;
4303 enum machine_mode mode, op_mode1;
4304 enum insn_code icode;
4305 rtx reg, addr, mem, insn;
4307 if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0))))
4308 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4310 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4311 reg = force_not_mem (reg);
4313 mode = TYPE_MODE (TREE_TYPE (to));
4314 addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
4316 addr = memory_address_addr_space (mode, addr, as);
4317 mem = gen_rtx_MEM (mode, addr);
4319 set_mem_attributes (mem, to, 0);
4320 set_mem_addr_space (mem, as);
4322 icode = optab_handler (movmisalign_optab, mode);
4323 gcc_assert (icode != CODE_FOR_nothing);
4325 op_mode1 = insn_data[icode].operand[1].mode;
4326 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4327 && op_mode1 != VOIDmode)
4328 reg = copy_to_mode_reg (op_mode1, reg);
4330 insn = GEN_FCN (icode) (mem, reg);
4335 /* If the rhs is a function call and its value is not an aggregate,
4336 call the function before we start to compute the lhs.
4337 This is needed for correct code for cases such as
4338 val = setjmp (buf) on machines where reference to val
4339 requires loading up part of an address in a separate insn.
4341 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4342 since it might be a promoted variable where the zero- or sign-extension
4343 needs to be done. Handling this in the normal way is safe because no
4344 computation is done before the call. The same is true for SSA names. */
4345 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4346 && COMPLETE_TYPE_P (TREE_TYPE (from))
4347 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4348 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4349 && REG_P (DECL_RTL (to)))
4350 || TREE_CODE (to) == SSA_NAME))
4355 value = expand_normal (from);
4357 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4359 /* Handle calls that return values in multiple non-contiguous locations.
4360 The Irix 6 ABI has examples of this. */
4361 if (GET_CODE (to_rtx) == PARALLEL)
4362 emit_group_load (to_rtx, value, TREE_TYPE (from),
4363 int_size_in_bytes (TREE_TYPE (from)));
4364 else if (GET_MODE (to_rtx) == BLKmode)
4365 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4368 if (POINTER_TYPE_P (TREE_TYPE (to)))
4369 value = convert_memory_address_addr_space
4370 (GET_MODE (to_rtx), value,
4371 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4373 emit_move_insn (to_rtx, value);
4375 preserve_temp_slots (to_rtx);
4381 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4382 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4385 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4387 /* Don't move directly into a return register. */
4388 if (TREE_CODE (to) == RESULT_DECL
4389 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4394 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4396 if (GET_CODE (to_rtx) == PARALLEL)
4397 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4398 int_size_in_bytes (TREE_TYPE (from)));
4400 emit_move_insn (to_rtx, temp);
4402 preserve_temp_slots (to_rtx);
4408 /* In case we are returning the contents of an object which overlaps
4409 the place the value is being stored, use a safe function when copying
4410 a value through a pointer into a structure value return block. */
4411 if (TREE_CODE (to) == RESULT_DECL
4412 && TREE_CODE (from) == INDIRECT_REF
4413 && ADDR_SPACE_GENERIC_P
4414 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4415 && refs_may_alias_p (to, from)
4416 && cfun->returns_struct
4417 && !cfun->returns_pcc_struct)
4422 size = expr_size (from);
4423 from_rtx = expand_normal (from);
4425 emit_library_call (memmove_libfunc, LCT_NORMAL,
4426 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4427 XEXP (from_rtx, 0), Pmode,
4428 convert_to_mode (TYPE_MODE (sizetype),
4429 size, TYPE_UNSIGNED (sizetype)),
4430 TYPE_MODE (sizetype));
4432 preserve_temp_slots (to_rtx);
4438 /* Compute FROM and store the value in the rtx we got. */
4441 result = store_expr (from, to_rtx, 0, nontemporal);
4442 preserve_temp_slots (result);
4448 /* Emit a nontemporal store insn that moves FROM to TO. Return true if this
4449 succeeded, false otherwise. */
4452 emit_storent_insn (rtx to, rtx from)
4454 enum machine_mode mode = GET_MODE (to), imode;
4455 enum insn_code code = optab_handler (storent_optab, mode);
4458 if (code == CODE_FOR_nothing)
4461 imode = insn_data[code].operand[0].mode;
4462 if (!insn_data[code].operand[0].predicate (to, imode))
4465 imode = insn_data[code].operand[1].mode;
4466 if (!insn_data[code].operand[1].predicate (from, imode))
4468 from = copy_to_mode_reg (imode, from);
4469 if (!insn_data[code].operand[1].predicate (from, imode))
4473 pattern = GEN_FCN (code) (to, from);
4474 if (pattern == NULL_RTX)
4477 emit_insn (pattern);
4481 /* Generate code for computing expression EXP,
4482 and storing the value into TARGET.
4484 If the mode is BLKmode then we may return TARGET itself.
4485 It turns out that in BLKmode it doesn't cause a problem,
4486 because C has no operators that could combine two different
4487 assignments into the same BLKmode object with different values
4488 with no sequence point. Will other languages need this to be more thorough?
4491 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4492 stack, and block moves may need to be treated specially.
4494 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4497 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4500 rtx alt_rtl = NULL_RTX;
4501 location_t loc = EXPR_LOCATION (exp);
4503 if (VOID_TYPE_P (TREE_TYPE (exp)))
4505 /* C++ can generate ?: expressions with a throw expression in one
4506 branch and an rvalue in the other. Here, we resolve attempts to
4507 store the throw expression's nonexistent result. */
4508 gcc_assert (!call_param_p);
4509 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4512 if (TREE_CODE (exp) == COMPOUND_EXPR)
4514 /* Perform first part of compound expression, then assign from second part. */
4516 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4517 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4518 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4521 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4523 /* For conditional expression, get safe form of the target. Then
4524 test the condition, doing the appropriate assignment on either
4525 side. This avoids the creation of unnecessary temporaries.
4526 For non-BLKmode, it is more efficient not to do this. */
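/* Sketch of the idea (hypothetical example): for a BLKmode target,

     dst = cond ? a : b;   -- a, b, dst large structures

   expands roughly as

     if (!cond) goto L1;
     dst = a;  goto L2;
     L1:  dst = b;
     L2:  ;

   so no temporary copy of the aggregate is ever created.  */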
4528 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4530 do_pending_stack_adjust ();
4532 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4533 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4535 emit_jump_insn (gen_jump (lab2));
4538 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4545 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4546 /* If this is a scalar in a register that is stored in a wider mode
4547 than the declared mode, compute the result into its declared mode
4548 and then convert to the wider mode. Our value is the computed expression. */
4551 rtx inner_target = 0;
4553 /* We can do the conversion inside EXP, which will often result
4554 in some optimizations. Do the conversion in two steps: first
4555 change the signedness, if needed, then do the extension. But don't
4556 do this if the type of EXP is a subtype of something else
4557 since then the conversion might involve more than just
4558 converting modes. */
4559 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4560 && TREE_TYPE (TREE_TYPE (exp)) == 0
4561 && GET_MODE_PRECISION (GET_MODE (target))
4562 == TYPE_PRECISION (TREE_TYPE (exp)))
4564 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4565 != SUBREG_PROMOTED_UNSIGNED_P (target))
4567 /* Some types, e.g. Fortran's logical*4, won't have a signed
4568 version, so use the mode instead. */
4570 = (signed_or_unsigned_type_for
4571 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4573 ntype = lang_hooks.types.type_for_mode
4574 (TYPE_MODE (TREE_TYPE (exp)),
4575 SUBREG_PROMOTED_UNSIGNED_P (target));
4577 exp = fold_convert_loc (loc, ntype, exp);
4580 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4581 (GET_MODE (SUBREG_REG (target)),
4582 SUBREG_PROMOTED_UNSIGNED_P (target)),
4585 inner_target = SUBREG_REG (target);
4588 temp = expand_expr (exp, inner_target, VOIDmode,
4589 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4591 /* If TEMP is a VOIDmode constant, use convert_modes to make
4592 sure that we properly convert it. */
4593 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4595 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4596 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4597 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4598 GET_MODE (target), temp,
4599 SUBREG_PROMOTED_UNSIGNED_P (target));
4602 convert_move (SUBREG_REG (target), temp,
4603 SUBREG_PROMOTED_UNSIGNED_P (target));
4607 else if (TREE_CODE (exp) == STRING_CST
4608 && !nontemporal && !call_param_p
4609 && TREE_STRING_LENGTH (exp) > 0
4610 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4612 /* Optimize initialization of an array with a STRING_CST. */
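/* E.g. (hypothetical example, not from the original sources)

     char buf[64] = "hi";

   copies the string bytes by pieces and clears the remaining zero
   tail of the array with clear_storage.  */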
4613 HOST_WIDE_INT exp_len, str_copy_len;
4616 exp_len = int_expr_size (exp);
4620 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4621 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4624 str_copy_len = TREE_STRING_LENGTH (exp);
4625 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4627 str_copy_len += STORE_MAX_PIECES - 1;
4628 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4630 str_copy_len = MIN (str_copy_len, exp_len);
4631 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4632 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4633 MEM_ALIGN (target), false))
4638 dest_mem = store_by_pieces (dest_mem,
4639 str_copy_len, builtin_strncpy_read_str,
4640 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4641 MEM_ALIGN (target), false,
4642 exp_len > str_copy_len ? 1 : 0);
4643 if (exp_len > str_copy_len)
4644 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4645 GEN_INT (exp_len - str_copy_len),
4649 else if (TREE_CODE (exp) == MEM_REF
4650 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4651 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == STRING_CST
4652 && integer_zerop (TREE_OPERAND (exp, 1))
4653 && !nontemporal && !call_param_p
4654 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4656 /* Optimize initialization of an array with a STRING_CST. */
4657 HOST_WIDE_INT exp_len, str_copy_len;
4659 tree str = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4661 exp_len = int_expr_size (exp);
4665 str_copy_len = strlen (TREE_STRING_POINTER (str));
4666 if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
4669 str_copy_len = TREE_STRING_LENGTH (str);
4670 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4672 str_copy_len += STORE_MAX_PIECES - 1;
4673 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4675 str_copy_len = MIN (str_copy_len, exp_len);
4676 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4677 CONST_CAST(char *, TREE_STRING_POINTER (str)),
4678 MEM_ALIGN (target), false))
4683 dest_mem = store_by_pieces (dest_mem,
4684 str_copy_len, builtin_strncpy_read_str,
4685 CONST_CAST(char *, TREE_STRING_POINTER (str)),
4686 MEM_ALIGN (target), false,
4687 exp_len > str_copy_len ? 1 : 0);
4688 if (exp_len > str_copy_len)
4689 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4690 GEN_INT (exp_len - str_copy_len),
4699 /* If we want to use a nontemporal store, force the value to a register first. */
4701 tmp_target = nontemporal ? NULL_RTX : target;
4702 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4704 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4708 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4709 the same as that of TARGET, adjust the constant. This is needed, for
4710 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4712 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4713 && TREE_CODE (exp) != ERROR_MARK
4714 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4715 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4716 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4718 /* If value was not generated in the target, store it there.
4719 Convert the value to TARGET's type first if necessary and emit the
4720 pending increments that have been queued when expanding EXP.
4721 Note that we cannot emit the whole queue blindly because this will
4722 effectively disable the POST_INC optimization later.
4724 If TEMP and TARGET compare equal according to rtx_equal_p, but
4725 one or both of them are volatile memory refs, we have to distinguish two cases:
4727 - expand_expr has used TARGET. In this case, we must not generate
4728 another copy. This can be detected by TARGET being equal according to == .
4730 - expand_expr has not used TARGET - that means that the source just
4731 happens to have the same RTX form. Since temp will have been created
4732 by expand_expr, it will compare unequal according to == .
4733 We must generate a copy in this case, to reach the correct number
4734 of volatile memory references. */
4736 if ((! rtx_equal_p (temp, target)
4737 || (temp != target && (side_effects_p (temp)
4738 || side_effects_p (target))))
4739 && TREE_CODE (exp) != ERROR_MARK
4740 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4741 but TARGET is not a valid memory reference, TEMP will differ
4742 from TARGET although it is really the same location. */
4743 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4744 /* If there's nothing to copy, don't bother. Don't call
4745 expr_size unless necessary, because some front-ends' (C++)
4746 expr_size-hook must not be given objects that are not
4747 supposed to be bit-copied or bit-initialized. */
4748 && expr_size (exp) != const0_rtx)
4750 if (GET_MODE (temp) != GET_MODE (target)
4751 && GET_MODE (temp) != VOIDmode)
4753 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4754 if (GET_MODE (target) == BLKmode
4755 && GET_MODE (temp) == BLKmode)
4756 emit_block_move (target, temp, expr_size (exp),
4758 ? BLOCK_OP_CALL_PARM
4759 : BLOCK_OP_NORMAL));
4760 else if (GET_MODE (target) == BLKmode)
4761 store_bit_field (target, INTVAL (expr_size (exp)) * BITS_PER_UNIT,
4762 0, GET_MODE (temp), temp);
4764 convert_move (target, temp, unsignedp);
4767 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4769 /* Handle copying a string constant into an array. The string
4770 constant may be shorter than the array. So copy just the string's
4771 actual length, and clear the rest. First get the size of the data
4772 type of the string, which is actually the size of the target. */
4773 rtx size = expr_size (exp);
4775 if (CONST_INT_P (size)
4776 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4777 emit_block_move (target, temp, size,
4779 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4782 enum machine_mode pointer_mode
4783 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4784 enum machine_mode address_mode
4785 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4787 /* Compute the size of the data to copy from the string. */
4789 = size_binop_loc (loc, MIN_EXPR,
4790 make_tree (sizetype, size),
4791 size_int (TREE_STRING_LENGTH (exp)));
4793 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4795 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4798 /* Copy that much. */
4799 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4800 TYPE_UNSIGNED (sizetype));
4801 emit_block_move (target, temp, copy_size_rtx,
4803 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4805 /* Figure out how much is left in TARGET that we have to clear.
4806 Do all calculations in pointer_mode. */
4807 if (CONST_INT_P (copy_size_rtx))
4809 size = plus_constant (size, -INTVAL (copy_size_rtx));
4810 target = adjust_address (target, BLKmode,
4811 INTVAL (copy_size_rtx));
4815 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4816 copy_size_rtx, NULL_RTX, 0,
4819 if (GET_MODE (copy_size_rtx) != address_mode)
4820 copy_size_rtx = convert_to_mode (address_mode,
4822 TYPE_UNSIGNED (sizetype));
4824 target = offset_address (target, copy_size_rtx,
4825 highest_pow2_factor (copy_size));
4826 label = gen_label_rtx ();
4827 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4828 GET_MODE (size), 0, label);
4831 if (size != const0_rtx)
4832 clear_storage (target, size, BLOCK_OP_NORMAL);
4838 /* Handle calls that return values in multiple non-contiguous locations.
4839 The Irix 6 ABI has examples of this. */
4840 else if (GET_CODE (target) == PARALLEL)
4841 emit_group_load (target, temp, TREE_TYPE (exp),
4842 int_size_in_bytes (TREE_TYPE (exp)));
4843 else if (GET_MODE (temp) == BLKmode)
4844 emit_block_move (target, temp, expr_size (exp),
4846 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4847 else if (nontemporal
4848 && emit_storent_insn (target, temp))
4849 /* If we managed to emit a nontemporal store, there is nothing else to do. */
4854 temp = force_operand (temp, target);
4856 emit_move_insn (target, temp);
4863 /* Helper for categorize_ctor_elements. Identical interface. */
4866 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4867 HOST_WIDE_INT *p_elt_count,
4870 unsigned HOST_WIDE_INT idx;
4871 HOST_WIDE_INT nz_elts, elt_count;
4872 tree value, purpose;
4874 /* Whether CTOR is a valid constant initializer, in accordance with what
4875 initializer_constant_valid_p does. If inferred from the constructor
4876 elements, true until proven otherwise. */
4877 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4878 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4883 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4885 HOST_WIDE_INT mult = 1;
4887 if (TREE_CODE (purpose) == RANGE_EXPR)
4889 tree lo_index = TREE_OPERAND (purpose, 0);
4890 tree hi_index = TREE_OPERAND (purpose, 1);
4892 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4893 mult = (tree_low_cst (hi_index, 1)
4894 - tree_low_cst (lo_index, 1) + 1);
4897 switch (TREE_CODE (value))
4901 HOST_WIDE_INT nz = 0, ic = 0;
4904 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4906 nz_elts += mult * nz;
4907 elt_count += mult * ic;
4909 if (const_from_elts_p && const_p)
4910 const_p = const_elt_p;
4917 if (!initializer_zerop (value))
4923 nz_elts += mult * TREE_STRING_LENGTH (value);
4924 elt_count += mult * TREE_STRING_LENGTH (value);
4928 if (!initializer_zerop (TREE_REALPART (value)))
4930 if (!initializer_zerop (TREE_IMAGPART (value)))
4938 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4940 if (!initializer_zerop (TREE_VALUE (v)))
4949 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4952 nz_elts += mult * tc;
4953 elt_count += mult * tc;
4955 if (const_from_elts_p && const_p)
4956 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4964 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4965 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4968 bool clear_this = true;
4970 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4972 /* We don't expect more than one element of the union to be
4973 initialized. Not sure what we should do otherwise... */
4974 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4977 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4978 CONSTRUCTOR_ELTS (ctor),
4981 /* ??? We could look at each element of the union, and find the
4982 largest element, which would avoid comparing the size of the
4983 initialized element against any tail padding in the union.
4984 Doesn't seem worth the effort... */
4985 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4986 TYPE_SIZE (init_sub_type)) == 1)
4988 /* And now we have to find out if the element itself is fully
4989 constructed. E.g. for union { struct { int a, b; } s; } u
4990 = { .s = { .a = 1 } }. */
4991 if (elt_count == count_type_elements (init_sub_type, false))
4996 *p_must_clear = clear_this;
4999 *p_nz_elts += nz_elts;
5000 *p_elt_count += elt_count;
5005 /* Examine CTOR to discover:
5006 * how many scalar fields are set to nonzero values,
5007 and place that count in *P_NZ_ELTS;
5008 * how many scalar fields in total are in CTOR,
5009 and place that count in *P_ELT_COUNT;
5010 * if a type is a union, and the initializer from the constructor
5011 is not the largest element in the union, then set *p_must_clear.
5013 Return whether or not CTOR is a valid static constant initializer, the same
5014 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
5017 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5018 HOST_WIDE_INT *p_elt_count,
5023 *p_must_clear = false;
5026 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5029 /* Count the number of scalars in TYPE. Return -1 on overflow or
5030 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
5031 array member at the end of the structure. */
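/* E.g. (hypothetical example):

     struct P { int x; int y[3]; };

   contains 1 + 3 == 4 scalars, so count_type_elements returns 4.
   A variable-length array, or a count that would overflow
   HOST_WIDE_INT, returns -1 instead.  */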
5034 count_type_elements (const_tree type, bool allow_flexarr)
5036 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5037 switch (TREE_CODE (type))
5041 tree telts = array_type_nelts (type);
5042 if (telts && host_integerp (telts, 1))
5044 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5045 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5048 else if (max / n > m)
5056 HOST_WIDE_INT n = 0, t;
5059 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5060 if (TREE_CODE (f) == FIELD_DECL)
5062 t = count_type_elements (TREE_TYPE (f), false);
5065 /* Check for structures with a flexible array member. */
5066 tree tf = TREE_TYPE (f);
5068 && DECL_CHAIN (f) == NULL
5069 && TREE_CODE (tf) == ARRAY_TYPE
5071 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5072 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5073 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5074 && int_size_in_bytes (type) >= 0)
5086 case QUAL_UNION_TYPE:
5093 return TYPE_VECTOR_SUBPARTS (type);
5097 case FIXED_POINT_TYPE:
5102 case REFERENCE_TYPE:
5117 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
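/* E.g. (hypothetical example): the initializer

     int v[8] = { 0, 0, 0, 0, 0, 0, 0, 5 };

   has 1 nonzero element out of 8, and 1 < 8/4, so it counts as
   mostly zeros; with 2 or more nonzero elements it would not.  */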
5120 mostly_zeros_p (const_tree exp)
5122 if (TREE_CODE (exp) == CONSTRUCTOR)
5125 HOST_WIDE_INT nz_elts, count, elts;
5128 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5132 elts = count_type_elements (TREE_TYPE (exp), false);
5134 return nz_elts < elts / 4;
5137 return initializer_zerop (exp);
5140 /* Return 1 if EXP contains all zeros. */
5143 all_zeros_p (const_tree exp)
5145 if (TREE_CODE (exp) == CONSTRUCTOR)
5148 HOST_WIDE_INT nz_elts, count;
5151 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5152 return nz_elts == 0;
5155 return initializer_zerop (exp);
5158 /* Helper function for store_constructor.
5159 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5160 TYPE is the type of the CONSTRUCTOR, not the element type.
5161 CLEARED is as for store_constructor.
5162 ALIAS_SET is the alias set to use for any stores.
5164 This provides a recursive shortcut back to store_constructor when it isn't
5165 necessary to go through store_field. This is so that we can pass through
5166 the cleared field to let store_constructor know that we may not have to
5167 clear a substructure if the outer structure has already been cleared. */
5170 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5171 HOST_WIDE_INT bitpos, enum machine_mode mode,
5172 tree exp, tree type, int cleared,
5173 alias_set_type alias_set)
5175 if (TREE_CODE (exp) == CONSTRUCTOR
5176 /* We can only call store_constructor recursively if the size and
5177 bit position are on a byte boundary. */
5178 && bitpos % BITS_PER_UNIT == 0
5179 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5180 /* If we have a nonzero bitpos for a register target, then we just
5181 let store_field do the bitfield handling. This is unlikely to
5182 generate unnecessary clear instructions anyway. */
5183 && (bitpos == 0 || MEM_P (target)))
5187 = adjust_address (target,
5188 GET_MODE (target) == BLKmode
5190 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5191 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5194 /* Update the alias set, if required. */
5195 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5196 && MEM_ALIAS_SET (target) != 0)
5198 target = copy_rtx (target);
5199 set_mem_alias_set (target, alias_set);
5202 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5205 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5208 /* Store the value of constructor EXP into the rtx TARGET.
5209 TARGET is either a REG or a MEM; we know it cannot conflict, since
5210 safe_from_p has been called.
5211 CLEARED is true if TARGET is known to have been zeroed.
5212 SIZE is the number of bytes of TARGET we are allowed to modify: this
5213 may not be the same as the size of EXP if we are assigning to a field
5214 which has been packed to exclude padding bits. */
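/* Sketch of the overall strategy (hypothetical example): for

     int v[100] = { [3] = 7 };

   the constructor has far fewer elements than the array, so the
   whole target is cleared first and only the single nonzero
   element is stored afterwards.  */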
5217 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5219 tree type = TREE_TYPE (exp);
5220 #ifdef WORD_REGISTER_OPERATIONS
5221 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5224 switch (TREE_CODE (type))
5228 case QUAL_UNION_TYPE:
5230 unsigned HOST_WIDE_INT idx;
5233 /* If size is zero or the target is already cleared, do nothing. */
5234 if (size == 0 || cleared)
5236 /* We either clear the aggregate or indicate the value is dead. */
5237 else if ((TREE_CODE (type) == UNION_TYPE
5238 || TREE_CODE (type) == QUAL_UNION_TYPE)
5239 && ! CONSTRUCTOR_ELTS (exp))
5240 /* If the constructor is empty, clear the union. */
5242 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5246 /* If we are building a static constructor into a register,
5247 set the initial value as zero so we can fold the value into
5248 a constant. But if more than one register is involved,
5249 this probably loses. */
5250 else if (REG_P (target) && TREE_STATIC (exp)
5251 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5253 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5257 /* If the constructor has fewer fields than the structure or
5258 if we are initializing the structure to mostly zeros, clear
5259 the whole structure first. Don't do this if TARGET is a
5260 register whose mode size isn't equal to SIZE since
5261 clear_storage can't handle this case. */
5263 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5264 != fields_length (type))
5265 || mostly_zeros_p (exp))
5267 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5270 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5274 if (REG_P (target) && !cleared)
5275 emit_clobber (target);
5277 /* Store each element of the constructor into the
5278 corresponding field of TARGET. */
5279 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5281 enum machine_mode mode;
5282 HOST_WIDE_INT bitsize;
5283 HOST_WIDE_INT bitpos = 0;
5285 rtx to_rtx = target;
5287 /* Just ignore missing fields. We cleared the whole
5288 structure, above, if any fields are missing. */
5292 if (cleared && initializer_zerop (value))
5295 if (host_integerp (DECL_SIZE (field), 1))
5296 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5300 mode = DECL_MODE (field);
5301 if (DECL_BIT_FIELD (field))
5304 offset = DECL_FIELD_OFFSET (field);
5305 if (host_integerp (offset, 0)
5306 && host_integerp (bit_position (field), 0))
5308 bitpos = int_bit_position (field);
5312 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5316 enum machine_mode address_mode;
5320 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5321 make_tree (TREE_TYPE (exp),
5324 offset_rtx = expand_normal (offset);
5325 gcc_assert (MEM_P (to_rtx));
5328 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5329 if (GET_MODE (offset_rtx) != address_mode)
5330 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5332 to_rtx = offset_address (to_rtx, offset_rtx,
5333 highest_pow2_factor (offset));
5336 #ifdef WORD_REGISTER_OPERATIONS
5337 /* If this initializes a field that is smaller than a
5338 word, at the start of a word, try to widen it to a full
5339 word. This special case allows us to output C++ member
5340 function initializations in a form that the optimizers can understand. */
5343 && bitsize < BITS_PER_WORD
5344 && bitpos % BITS_PER_WORD == 0
5345 && GET_MODE_CLASS (mode) == MODE_INT
5346 && TREE_CODE (value) == INTEGER_CST
5348 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5350 tree type = TREE_TYPE (value);
5352 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5354 type = lang_hooks.types.type_for_size
5355 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5356 value = fold_convert (type, value);
5359 if (BYTES_BIG_ENDIAN)
5361 = fold_build2 (LSHIFT_EXPR, type, value,
5362 build_int_cst (type,
5363 BITS_PER_WORD - bitsize));
5364 bitsize = BITS_PER_WORD;
5369 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5370 && DECL_NONADDRESSABLE_P (field))
5372 to_rtx = copy_rtx (to_rtx);
5373 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5376 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5377 value, type, cleared,
5378 get_alias_set (TREE_TYPE (field)));
5385 unsigned HOST_WIDE_INT i;
5388 tree elttype = TREE_TYPE (type);
5390 HOST_WIDE_INT minelt = 0;
5391 HOST_WIDE_INT maxelt = 0;
5393 domain = TYPE_DOMAIN (type);
5394 const_bounds_p = (TYPE_MIN_VALUE (domain)
5395 && TYPE_MAX_VALUE (domain)
5396 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5397 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5399 /* If we have constant bounds for the range of the type, get them. */
5402 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5403 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5406 /* If the constructor has fewer elements than the array, clear
5407 the whole array first. Similarly if this is a static
5408 constructor of a non-BLKmode object. */
5411 else if (REG_P (target) && TREE_STATIC (exp))
5415 unsigned HOST_WIDE_INT idx;
5417 HOST_WIDE_INT count = 0, zero_count = 0;
5418 need_to_clear = ! const_bounds_p;
5420 /* This loop is a more accurate version of the loop in
5421 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5422 is also needed to check for missing elements. */
5423 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5425 HOST_WIDE_INT this_node_count;
5430 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5432 tree lo_index = TREE_OPERAND (index, 0);
5433 tree hi_index = TREE_OPERAND (index, 1);
5435 if (! host_integerp (lo_index, 1)
5436 || ! host_integerp (hi_index, 1))
5442 this_node_count = (tree_low_cst (hi_index, 1)
5443 - tree_low_cst (lo_index, 1) + 1);
5446 this_node_count = 1;
5448 count += this_node_count;
5449 if (mostly_zeros_p (value))
5450 zero_count += this_node_count;
5453 /* Clear the entire array first if there are any missing
5454 elements, or if the incidence of zero elements is >= 75%. */
5457 && (count < maxelt - minelt + 1
5458 || 4 * zero_count >= 3 * count))
5462 if (need_to_clear && size > 0)
5465 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5467 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5471 if (!cleared && REG_P (target))
5472 /* Inform later passes that the old value is dead. */
5473 emit_clobber (target);
5475 /* Store each element of the constructor into the
5476 corresponding element of TARGET, determined by counting the elements. */
5478 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5480 enum machine_mode mode;
5481 HOST_WIDE_INT bitsize;
5482 HOST_WIDE_INT bitpos;
5483 rtx xtarget = target;
5485 if (cleared && initializer_zerop (value))
5488 mode = TYPE_MODE (elttype);
5489 if (mode == BLKmode)
5490 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5491 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5494 bitsize = GET_MODE_BITSIZE (mode);
5496 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5498 tree lo_index = TREE_OPERAND (index, 0);
5499 tree hi_index = TREE_OPERAND (index, 1);
5500 rtx index_r, pos_rtx;
5501 HOST_WIDE_INT lo, hi, count;
5504 /* If the range is constant and "small", unroll the loop. */
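/* Such a RANGE_EXPR index arises e.g. from a GNU C designated
   range initializer (hypothetical example):

     int v[16] = { [2 ... 5] = 7 };

   Here the count is the constant 4, so the four assignments are
   unrolled into separate stores rather than a runtime loop.  */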
5506 && host_integerp (lo_index, 0)
5507 && host_integerp (hi_index, 0)
5508 && (lo = tree_low_cst (lo_index, 0),
5509 hi = tree_low_cst (hi_index, 0),
5510 count = hi - lo + 1,
5513 || (host_integerp (TYPE_SIZE (elttype), 1)
5514 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5517 lo -= minelt; hi -= minelt;
5518 for (; lo <= hi; lo++)
5520 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5523 && !MEM_KEEP_ALIAS_SET_P (target)
5524 && TREE_CODE (type) == ARRAY_TYPE
5525 && TYPE_NONALIASED_COMPONENT (type))
5527 target = copy_rtx (target);
5528 MEM_KEEP_ALIAS_SET_P (target) = 1;
5531 store_constructor_field
5532 (target, bitsize, bitpos, mode, value, type, cleared,
5533 get_alias_set (elttype));
5538 rtx loop_start = gen_label_rtx ();
5539 rtx loop_end = gen_label_rtx ();
5542 expand_normal (hi_index);
5544 index = build_decl (EXPR_LOCATION (exp),
5545 VAR_DECL, NULL_TREE, domain);
5546 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5547 SET_DECL_RTL (index, index_r);
5548 store_expr (lo_index, index_r, 0, false);
5550 /* Build the head of the loop. */
5551 do_pending_stack_adjust ();
5552 emit_label (loop_start);
5554 /* Assign value to element index. */
5556 fold_convert (ssizetype,
5557 fold_build2 (MINUS_EXPR,
5560 TYPE_MIN_VALUE (domain)));
5563 size_binop (MULT_EXPR, position,
5564 fold_convert (ssizetype,
5565 TYPE_SIZE_UNIT (elttype)));
5567 pos_rtx = expand_normal (position);
5568 xtarget = offset_address (target, pos_rtx,
5569 highest_pow2_factor (position));
5570 xtarget = adjust_address (xtarget, mode, 0);
5571 if (TREE_CODE (value) == CONSTRUCTOR)
5572 store_constructor (value, xtarget, cleared,
5573 bitsize / BITS_PER_UNIT);
5575 store_expr (value, xtarget, 0, false);
5577 /* Generate a conditional jump to exit the loop. */
5578 exit_cond = build2 (LT_EXPR, integer_type_node,
5580 jumpif (exit_cond, loop_end, -1);
5582 /* Update the loop counter, and jump to the head of the loop. */
5584 expand_assignment (index,
5585 build2 (PLUS_EXPR, TREE_TYPE (index),
5586 index, integer_one_node),
5589 emit_jump (loop_start);
5591 /* Build the end of the loop. */
5592 emit_label (loop_end);
5595 else if ((index != 0 && ! host_integerp (index, 0))
5596 || ! host_integerp (TYPE_SIZE (elttype), 1))
5601 index = ssize_int (1);
5604 index = fold_convert (ssizetype,
5605 fold_build2 (MINUS_EXPR,
5608 TYPE_MIN_VALUE (domain)));
5611 size_binop (MULT_EXPR, index,
5612 fold_convert (ssizetype,
5613 TYPE_SIZE_UNIT (elttype)));
5614 xtarget = offset_address (target,
5615 expand_normal (position),
5616 highest_pow2_factor (position));
5617 xtarget = adjust_address (xtarget, mode, 0);
5618 store_expr (value, xtarget, 0, false);
5623 bitpos = ((tree_low_cst (index, 0) - minelt)
5624 * tree_low_cst (TYPE_SIZE (elttype), 1));
5626 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5628 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5629 && TREE_CODE (type) == ARRAY_TYPE
5630 && TYPE_NONALIASED_COMPONENT (type))
5632 target = copy_rtx (target);
5633 MEM_KEEP_ALIAS_SET_P (target) = 1;
5635 store_constructor_field (target, bitsize, bitpos, mode, value,
5636 type, cleared, get_alias_set (elttype));
5644 unsigned HOST_WIDE_INT idx;
5645 constructor_elt *ce;
5649 tree elttype = TREE_TYPE (type);
5650 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5651 enum machine_mode eltmode = TYPE_MODE (elttype);
5652 HOST_WIDE_INT bitsize;
5653 HOST_WIDE_INT bitpos;
5654 rtvec vector = NULL;
5656 alias_set_type alias;
5658 gcc_assert (eltmode != BLKmode);
5660 n_elts = TYPE_VECTOR_SUBPARTS (type);
5661 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5663 enum machine_mode mode = GET_MODE (target);
5665 icode = (int) optab_handler (vec_init_optab, mode);
5666 if (icode != CODE_FOR_nothing)
5670 vector = rtvec_alloc (n_elts);
5671 for (i = 0; i < n_elts; i++)
5672 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5676 /* If the constructor has fewer elements than the vector,
5677 clear the whole array first. Similarly if this is a static
5678 constructor of a non-BLKmode object. */
5681 else if (REG_P (target) && TREE_STATIC (exp))
5685 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5688 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5690 int n_elts_here = tree_low_cst
5691 (int_const_binop (TRUNC_DIV_EXPR,
5692 TYPE_SIZE (TREE_TYPE (value)),
5693 TYPE_SIZE (elttype), 0), 1);
5695 count += n_elts_here;
5696 if (mostly_zeros_p (value))
5697 zero_count += n_elts_here;
5700 /* Clear the entire vector first if there are any missing elements,
5701 or if the incidence of zero elements is >= 75%. */
5702 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5705 if (need_to_clear && size > 0 && !vector)
5708 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5710 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5714 /* Inform later passes that the old value is dead. */
5715 if (!cleared && !vector && REG_P (target))
5716 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5719 alias = MEM_ALIAS_SET (target);
5721 alias = get_alias_set (elttype);
5723 /* Store each element of the constructor into the corresponding
5724 element of TARGET, determined by counting the elements. */
5725 for (idx = 0, i = 0;
5726 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5727 idx++, i += bitsize / elt_size)
5729 HOST_WIDE_INT eltpos;
5730 tree value = ce->value;
5732 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5733 if (cleared && initializer_zerop (value))
5737 eltpos = tree_low_cst (ce->index, 1);
5743 /* Vector CONSTRUCTORs should only be built from smaller
5744 vectors in the case of BLKmode vectors. */
5745 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5746 RTVEC_ELT (vector, eltpos)
5747 = expand_normal (value);
5751 enum machine_mode value_mode =
5752 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5753 ? TYPE_MODE (TREE_TYPE (value))
5755 bitpos = eltpos * elt_size;
5756 store_constructor_field (target, bitsize, bitpos,
5757 value_mode, value, type,
5763 emit_insn (GEN_FCN (icode)
5765 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5774 /* Store the value of EXP (an expression tree)
5775 into a subfield of TARGET which has mode MODE and occupies
5776 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5777 If MODE is VOIDmode, it means that we are storing into a bit-field.
5779 Always return const0_rtx unless we have something particular to return.
5782 TYPE is the type of the underlying object.
5784 ALIAS_SET is the alias set for the destination. This value will
5785 (in general) be different from that for TARGET, since TARGET is a
5786 reference to the containing structure.
5788 If NONTEMPORAL is true, try generating a nontemporal store. */
5791 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5792 enum machine_mode mode, tree exp, tree type,
5793 alias_set_type alias_set, bool nontemporal)
5795 if (TREE_CODE (exp) == ERROR_MARK)
5798 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5801 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5803 /* If we are storing into an unaligned field of an aligned union that is
5804 in a register, we may have the mode of TARGET being an integer mode but
5805 MODE == BLKmode. In that case, get an aligned object whose size and
5806 alignment are the same as TARGET and store TARGET into it (we can avoid
5807 the store if the field being stored is the entire width of TARGET). Then
5808 call ourselves recursively to store the field into a BLKmode version of
5809 that object. Finally, load from the object into TARGET. This is not
5810 very efficient in general, but should only be slightly more expensive
5811 than the otherwise-required unaligned accesses. Perhaps this can be
5812 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5813 twice, once with emit_move_insn and once via store_field. */
5816 && (REG_P (target) || GET_CODE (target) == SUBREG))
5818 rtx object = assign_temp (type, 0, 1, 1);
5819 rtx blk_object = adjust_address (object, BLKmode, 0);
5821 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5822 emit_move_insn (object, target);
5824 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5827 emit_move_insn (target, object);
5829 /* We want to return the BLKmode version of the data. */
5833 if (GET_CODE (target) == CONCAT)
5835 /* We're storing into a struct containing a single __complex. */
5837 gcc_assert (!bitpos);
5838 return store_expr (exp, target, 0, nontemporal);
5841 /* If the structure is in a register or if the component
5842 is a bit field, we cannot use addressing to access it.
5843 Use bit-field techniques or SUBREG to store in it. */
5845 if (mode == VOIDmode
5846 || (mode != BLKmode && ! direct_store[(int) mode]
5847 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5848 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5850 || GET_CODE (target) == SUBREG
5851 /* If the field isn't aligned enough to store as an ordinary memref,
5852 store it as a bit field. */
5854 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5855 || bitpos % GET_MODE_ALIGNMENT (mode))
5856 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5857 || (bitpos % BITS_PER_UNIT != 0)))
5858 /* If the RHS and field are a constant size and the size of the
5859 RHS isn't the same size as the bitfield, we must use bitfield operations. */
5862 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5863 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
5864 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
5865 decl we must use bitfield operations. */
5867 && TREE_CODE (exp) == MEM_REF
5868 && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5869 && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5870 && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
5871 && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
5876 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5877 implies a mask operation. If the precision is the same size as
5878 the field we're storing into, that mask is redundant. This is
5879 particularly common with bit field assignments generated by the C front end. */
5881 nop_def = get_def_for_expr (exp, NOP_EXPR);
5884 tree type = TREE_TYPE (exp);
5885 if (INTEGRAL_TYPE_P (type)
5886 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5887 && bitsize == TYPE_PRECISION (type))
5889 tree op = gimple_assign_rhs1 (nop_def);
5890 type = TREE_TYPE (op);
5891 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5896 temp = expand_normal (exp);
5898 /* If BITSIZE is narrower than the size of the type of EXP
5899 we will be narrowing TEMP. Normally, what's wanted are the
5900 low-order bits. However, if EXP's type is a record and this is a
5901 big-endian machine, we want the upper BITSIZE bits. */
5902 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5903 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5904 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5905 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5906 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5910 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5912 if (mode != VOIDmode && mode != BLKmode
5913 && mode != TYPE_MODE (TREE_TYPE (exp)))
5914 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5916 /* If the modes of TEMP and TARGET are both BLKmode, both
5917 must be in memory and BITPOS must be aligned on a byte
5918 boundary. If so, we simply do a block copy. Likewise
5919 for a BLKmode-like TARGET. */
5920 if (GET_MODE (temp) == BLKmode
5921 && (GET_MODE (target) == BLKmode
5923 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5924 && (bitpos % BITS_PER_UNIT) == 0
5925 && (bitsize % BITS_PER_UNIT) == 0)))
5927 gcc_assert (MEM_P (target) && MEM_P (temp)
5928 && (bitpos % BITS_PER_UNIT) == 0);
5930 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5931 emit_block_move (target, temp,
5932 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5939 /* Store the value in the bitfield. */
5940 store_bit_field (target, bitsize, bitpos, mode, temp);
5946 /* Now build a reference to just the desired component. */
5947 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5949 if (to_rtx == target)
5950 to_rtx = copy_rtx (to_rtx);
5952 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5953 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5954 set_mem_alias_set (to_rtx, alias_set);
5956 return store_expr (exp, to_rtx, 0, nontemporal);
5960 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5961 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5962 codes and find the ultimate containing object, which we return.
5964 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5965 bit position, and *PUNSIGNEDP to the signedness of the field.
5966 If the position of the field is variable, we store a tree
5967 giving the variable offset (in units) in *POFFSET.
5968 This offset is in addition to the bit position.
5969 If the position is not variable, we store 0 in *POFFSET.
5971 If any of the extraction expressions is volatile,
5972 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5974 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5975 Otherwise, it is a mode that can be used to access the field.
5977 If the field describes a variable-sized object, *PMODE is set to
5978 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5979 this case, but the address of the object can be found.
5981 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5982 look through nodes that serve as markers of a greater alignment than
5983 the one that can be deduced from the expression. These nodes make it
5984 possible for front-ends to prevent temporaries from being created by
5985 the middle-end on alignment considerations. For that purpose, the
5986 normal operating mode at high-level is to always pass FALSE so that
5987 the ultimate containing object is really returned; moreover, the
5988 associated predicate handled_component_p will always return TRUE
5989 on these nodes, thus indicating that they are essentially handled
5990 by get_inner_reference. TRUE should only be passed when the caller
5991 is scanning the expression in order to build another representation
5992 and specifically knows how to handle these nodes; as such, this is
5993 the normal operating mode in the RTL expanders. */
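/* For instance (hypothetical example, assuming 32-bit int and no
   padding): for

     struct S { int x; char c[8]; } s;

   calling get_inner_reference on the expression s.c[2] returns the
   base object "s" with *PBITSIZE == 8, *PBITPOS == 32 + 2*8 == 48,
   *POFFSET == NULL_TREE and *PMODE == QImode.  */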
5996 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5997 HOST_WIDE_INT *pbitpos, tree *poffset,
5998 enum machine_mode *pmode, int *punsignedp,
5999 int *pvolatilep, bool keep_aligning)
6002 enum machine_mode mode = VOIDmode;
6003 bool blkmode_bitfield = false;
6004 tree offset = size_zero_node;
6005 double_int bit_offset = double_int_zero;
6007 /* First get the mode, signedness, and size. We do this from just the
6008 outermost expression. */
6010 if (TREE_CODE (exp) == COMPONENT_REF)
6012 tree field = TREE_OPERAND (exp, 1);
6013 size_tree = DECL_SIZE (field);
6014 if (!DECL_BIT_FIELD (field))
6015 mode = DECL_MODE (field);
6016 else if (DECL_MODE (field) == BLKmode)
6017 blkmode_bitfield = true;
6018 else if (TREE_THIS_VOLATILE (exp)
6019 && flag_strict_volatile_bitfields > 0)
6020 /* Volatile bitfields should be accessed in the mode of the
6021 field's type, not the mode computed based on the bit size. */
6023 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6025 *punsignedp = DECL_UNSIGNED (field);
6027 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6029 size_tree = TREE_OPERAND (exp, 1);
6030 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6031 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6033 /* For vector types, with the correct size of access, use the mode of the inner type. */
6035 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6036 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6037 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6038 mode = TYPE_MODE (TREE_TYPE (exp));
6042 mode = TYPE_MODE (TREE_TYPE (exp));
6043 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6045 if (mode == BLKmode)
6046 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6048 *pbitsize = GET_MODE_BITSIZE (mode);
6053 if (! host_integerp (size_tree, 1))
6054 mode = BLKmode, *pbitsize = -1;
6056 *pbitsize = tree_low_cst (size_tree, 1);
6059 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6060 and find the ultimate containing object. */
6063 switch (TREE_CODE (exp))
6067 = double_int_add (bit_offset,
6068 tree_to_double_int (TREE_OPERAND (exp, 2)));
6073 tree field = TREE_OPERAND (exp, 1);
6074 tree this_offset = component_ref_field_offset (exp);
6076 /* If this field hasn't been filled in yet, don't go past it.
6077 This should only happen when folding expressions made during
6078 type construction. */
6079 if (this_offset == 0)
6082 offset = size_binop (PLUS_EXPR, offset, this_offset);
6083 bit_offset = double_int_add (bit_offset,
6085 (DECL_FIELD_BIT_OFFSET (field)));
6087 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6092 case ARRAY_RANGE_REF:
6094 tree index = TREE_OPERAND (exp, 1);
6095 tree low_bound = array_ref_low_bound (exp);
6096 tree unit_size = array_ref_element_size (exp);
6098 /* We assume all arrays have sizes that are a multiple of a byte.
6099 First subtract the lower bound, if any, in the type of the
6100 index, then convert to sizetype and multiply by the size of
6101 the array element. */
6102 if (! integer_zerop (low_bound))
6103 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6106 offset = size_binop (PLUS_EXPR, offset,
6107 size_binop (MULT_EXPR,
6108 fold_convert (sizetype, index),
6117 bit_offset = double_int_add (bit_offset,
6118 uhwi_to_double_int (*pbitsize));
6121 case VIEW_CONVERT_EXPR:
6122 if (keep_aligning && STRICT_ALIGNMENT
6123 && (TYPE_ALIGN (TREE_TYPE (exp))
6124 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6125 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6126 < BIGGEST_ALIGNMENT)
6127 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6128 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6133 /* Hand back the decl for MEM[&decl, off]. */
6134 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6136 tree off = TREE_OPERAND (exp, 1);
6137 if (!integer_zerop (off))
6139 double_int boff, coff = mem_ref_offset (exp);
6140 boff = double_int_lshift (coff,
6142 ? 3 : exact_log2 (BITS_PER_UNIT),
6143 HOST_BITS_PER_DOUBLE_INT, true);
6144 bit_offset = double_int_add (bit_offset, boff);
6146 exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6154 /* If any reference in the chain is volatile, the effect is volatile. */
6155 if (TREE_THIS_VOLATILE (exp))
6158 exp = TREE_OPERAND (exp, 0);
6162 /* If OFFSET is constant, see if we can return the whole thing as a
6163 constant bit position. Make sure to handle overflow during this conversion. */
6165 if (host_integerp (offset, 0))
6167 double_int tem = double_int_lshift (tree_to_double_int (offset),
6169 ? 3 : exact_log2 (BITS_PER_UNIT),
6170 HOST_BITS_PER_DOUBLE_INT, true);
6171 tem = double_int_add (tem, bit_offset);
6172 if (double_int_fits_in_shwi_p (tem))
6174 *pbitpos = double_int_to_shwi (tem);
6175 *poffset = offset = NULL_TREE;
6179 /* Otherwise, split it up. */
6182 *pbitpos = double_int_to_shwi (bit_offset);
6186 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6187 if (mode == VOIDmode
6189 && (*pbitpos % BITS_PER_UNIT) == 0
6190 && (*pbitsize % BITS_PER_UNIT) == 0)
6198 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6199 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6200 EXP are marked as PACKED. */
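/* E.g. (hypothetical example):

     struct __attribute__ ((packed)) P { char c; int i; } p;

   contains_packed_reference returns true for the expression p.i,
   because the field's DECL_PACKED flag is set.  */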
6203 contains_packed_reference (const_tree exp)
6205 bool packed_p = false;
6209 switch (TREE_CODE (exp))
6213 tree field = TREE_OPERAND (exp, 1);
6214 packed_p = DECL_PACKED (field)
6215 || TYPE_PACKED (TREE_TYPE (field))
6216 || TYPE_PACKED (TREE_TYPE (exp));
6224 case ARRAY_RANGE_REF:
6227 case VIEW_CONVERT_EXPR:
6233 exp = TREE_OPERAND (exp, 0);
6239 /* Return a tree of sizetype representing the size, in bytes, of the element
6240 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6243 array_ref_element_size (tree exp)
6245 tree aligned_size = TREE_OPERAND (exp, 3);
6246 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6247 location_t loc = EXPR_LOCATION (exp);
6249 /* If a size was specified in the ARRAY_REF, it's the size measured
6250 in alignment units of the element type. So multiply by that value. */
6253 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6254 sizetype from another type of the same width and signedness. */
6255 if (TREE_TYPE (aligned_size) != sizetype)
6256 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6257 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6258 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6261 /* Otherwise, take the size from that of the element type. Substitute
6262 any PLACEHOLDER_EXPR that we have. */
6264 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6267 /* Return a tree representing the lower bound of the array mentioned in
6268 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6270 tree
6271 array_ref_low_bound (tree exp)
6273 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6275 /* If a lower bound is specified in EXP, use it. */
6276 if (TREE_OPERAND (exp, 2))
6277 return TREE_OPERAND (exp, 2);
6279 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6280 substituting for a PLACEHOLDER_EXPR as needed. */
6281 if (domain_type && TYPE_MIN_VALUE (domain_type))
6282 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6284 /* Otherwise, return a zero of the appropriate type. */
6285 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6288 /* Return a tree representing the upper bound of the array mentioned in
6289 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6291 tree
6292 array_ref_up_bound (tree exp)
6294 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6296 /* If there is a domain type and it has an upper bound, use it, substituting
6297 for a PLACEHOLDER_EXPR as needed. */
6298 if (domain_type && TYPE_MAX_VALUE (domain_type))
6299 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6301 /* Otherwise fail. */
6302 return NULL_TREE;
6305 /* Return a tree representing the offset, in bytes, of the field referenced
6306 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6308 tree
6309 component_ref_field_offset (tree exp)
6311 tree aligned_offset = TREE_OPERAND (exp, 2);
6312 tree field = TREE_OPERAND (exp, 1);
6313 location_t loc = EXPR_LOCATION (exp);
6315 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6316 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6317 value. */
6318 if (aligned_offset)
6320 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6321 sizetype from another type of the same width and signedness. */
6322 if (TREE_TYPE (aligned_offset) != sizetype)
6323 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6324 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6325 size_int (DECL_OFFSET_ALIGN (field)
6329 /* Otherwise, take the offset from that of the field. Substitute
6330 any PLACEHOLDER_EXPR that we have. */
6332 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6335 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6337 static unsigned HOST_WIDE_INT
6338 target_align (const_tree target)
6340 /* We might have a chain of nested references with intermediate misaligning
6341 bitfield components, so we need to recurse to find out. */
6343 unsigned HOST_WIDE_INT this_align, outer_align;
6345 switch (TREE_CODE (target))
6351 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6352 outer_align = target_align (TREE_OPERAND (target, 0));
6353 return MIN (this_align, outer_align);
6355 case ARRAY_REF:
6356 case ARRAY_RANGE_REF:
6357 this_align = TYPE_ALIGN (TREE_TYPE (target));
6358 outer_align = target_align (TREE_OPERAND (target, 0));
6359 return MIN (this_align, outer_align);
6362 case NON_LVALUE_EXPR:
6363 case VIEW_CONVERT_EXPR:
6364 this_align = TYPE_ALIGN (TREE_TYPE (target));
6365 outer_align = target_align (TREE_OPERAND (target, 0));
6366 return MAX (this_align, outer_align);
6368 default:
6369 return TYPE_ALIGN (TREE_TYPE (target));
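/* Worked example with a hypothetical declaration: for
     struct s { char c; int i; } __attribute__ ((packed)) *p;
   the COMPONENT_REF case yields MIN (DECL_ALIGN (i), target_align (*p)).
   The packed field makes DECL_ALIGN (i) only 8 bits, so an assignment
   to p->i may assume just byte alignment even though TYPE_ALIGN of int
   is normally 32.  */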
6374 /* Given an rtx VALUE that may contain additions and multiplications, return
6375 an equivalent value that just refers to a register, memory, or constant.
6376 This is done by generating instructions to perform the arithmetic and
6377 returning a pseudo-register containing the value.
6379 The returned value may be a REG, SUBREG, MEM or constant. */
6381 rtx
6382 force_operand (rtx value, rtx target)
6385 /* Use subtarget as the target for operand 0 of a binary operation. */
6386 rtx subtarget = get_subtarget (target);
6387 enum rtx_code code = GET_CODE (value);
6389 /* Check for subreg applied to an expression produced by loop optimizer. */
6390 if (code == SUBREG
6391 && !REG_P (SUBREG_REG (value))
6392 && !MEM_P (SUBREG_REG (value)))
6394 value
6395 = simplify_gen_subreg (GET_MODE (value),
6396 force_reg (GET_MODE (SUBREG_REG (value)),
6397 force_operand (SUBREG_REG (value),
6398 NULL_RTX)),
6399 GET_MODE (SUBREG_REG (value)),
6400 SUBREG_BYTE (value));
6401 code = GET_CODE (value);
6404 /* Check for a PIC address load. */
6405 if ((code == PLUS || code == MINUS)
6406 && XEXP (value, 0) == pic_offset_table_rtx
6407 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6408 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6409 || GET_CODE (XEXP (value, 1)) == CONST))
6411 if (!subtarget)
6412 subtarget = gen_reg_rtx (GET_MODE (value));
6413 emit_move_insn (subtarget, value);
6414 return subtarget;
6417 if (ARITHMETIC_P (value))
6419 op2 = XEXP (value, 1);
6420 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6421 subtarget = 0;
6422 if (code == MINUS && CONST_INT_P (op2))
6423 {
6424 code = PLUS;
6425 op2 = negate_rtx (GET_MODE (value), op2);
6426 }
6428 /* Check for an addition with OP2 a constant integer and our first
6429 operand a PLUS of a virtual register and something else. In that
6430 case, we want to emit the sum of the virtual register and the
6431 constant first and then add the other value. This allows virtual
6432 register instantiation to simply modify the constant rather than
6433 creating another one around this addition. */
6434 if (code == PLUS && CONST_INT_P (op2)
6435 && GET_CODE (XEXP (value, 0)) == PLUS
6436 && REG_P (XEXP (XEXP (value, 0), 0))
6437 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6438 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6440 rtx temp = expand_simple_binop (GET_MODE (value), code,
6441 XEXP (XEXP (value, 0), 0), op2,
6442 subtarget, 0, OPTAB_LIB_WIDEN);
6443 return expand_simple_binop (GET_MODE (value), code, temp,
6444 force_operand (XEXP (XEXP (value,
6446 target, 0, OPTAB_LIB_WIDEN);
6449 op1 = force_operand (XEXP (value, 0), subtarget);
6450 op2 = force_operand (op2, NULL_RTX);
6454 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6456 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6457 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6458 target, 1, OPTAB_LIB_WIDEN);
6460 return expand_divmod (0,
6461 FLOAT_MODE_P (GET_MODE (value))
6462 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6463 GET_MODE (value), op1, op2, target, 0);
6465 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6468 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6471 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6474 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6475 target, 0, OPTAB_LIB_WIDEN);
6477 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6478 target, 1, OPTAB_LIB_WIDEN);
6481 if (UNARY_P (value))
6483 if (!target)
6484 target = gen_reg_rtx (GET_MODE (value));
6485 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6492 case FLOAT_TRUNCATE:
6493 convert_move (target, op1, code == ZERO_EXTEND);
6494 return target;
6498 expand_fix (target, op1, code == UNSIGNED_FIX);
6499 return target;
6502 case UNSIGNED_FLOAT:
6503 expand_float (target, op1, code == UNSIGNED_FLOAT);
6504 return target;
6507 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6511 #ifdef INSN_SCHEDULING
6512 /* On machines that have insn scheduling, we want all memory references to be
6513 explicit, so we need to deal with such paradoxical SUBREGs. */
6514 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6515 && (GET_MODE_SIZE (GET_MODE (value))
6516 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6517 value
6518 = simplify_gen_subreg (GET_MODE (value),
6519 force_reg (GET_MODE (SUBREG_REG (value)),
6520 force_operand (SUBREG_REG (value),
6521 NULL_RTX)),
6522 GET_MODE (SUBREG_REG (value)),
6523 SUBREG_BYTE (value));
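/* Usage sketch with hypothetical RTL: given
     value == (plus (mult (reg 100) (const_int 4)) (reg 101))
   force_operand emits the multiply and the add and returns a fresh
   pseudo-register holding the sum, so the caller may use the result
   anywhere a plain register, memory or constant operand is needed.  */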
6529 /* Subroutine of expand_expr: return nonzero iff there is no way that
6530 EXP can reference X, which is being modified. TOP_P is nonzero if this
6531 call is going to be used to determine whether we need a temporary
6532 for EXP, as opposed to a recursive call to this function.
6534 It is always safe for this routine to return zero since it merely
6535 searches for optimization opportunities. */
6537 static int
6538 safe_from_p (const_rtx x, tree exp, int top_p)
6544 /* If EXP has varying size, we MUST use a target since we currently
6545 have no way of allocating temporaries of variable size
6546 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6547 So we assume here that something at a higher level has prevented a
6548 clash. This is somewhat bogus, but the best we can do. Only
6549 do this when X is BLKmode and when we are at the top level. */
6550 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6551 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6552 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6553 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6554 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6556 && GET_MODE (x) == BLKmode)
6557 /* If X is in the outgoing argument area, it is always safe. */
6559 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6560 || (GET_CODE (XEXP (x, 0)) == PLUS
6561 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6564 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6565 find the underlying pseudo. */
6566 if (GET_CODE (x) == SUBREG)
6569 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6573 /* Now look at our tree code and possibly recurse. */
6574 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6576 case tcc_declaration:
6577 exp_rtl = DECL_RTL_IF_SET (exp);
6583 case tcc_exceptional:
6584 if (TREE_CODE (exp) == TREE_LIST)
6588 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6590 exp = TREE_CHAIN (exp);
6593 if (TREE_CODE (exp) != TREE_LIST)
6594 return safe_from_p (x, exp, 0);
6597 else if (TREE_CODE (exp) == CONSTRUCTOR)
6599 constructor_elt *ce;
6600 unsigned HOST_WIDE_INT idx;
6602 for (idx = 0;
6603 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6604 idx++)
6605 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6606 || !safe_from_p (x, ce->value, 0))
6610 else if (TREE_CODE (exp) == ERROR_MARK)
6611 return 1; /* An already-visited SAVE_EXPR? */
6616 /* The only case we look at here is the DECL_INITIAL inside a
6617 DECL_EXPR. */
6618 return (TREE_CODE (exp) != DECL_EXPR
6619 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6620 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6621 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6624 case tcc_comparison:
6625 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6630 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6632 case tcc_expression:
6635 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6636 the expression. If it is set, we conflict iff we are that rtx or
6637 both are in memory. Otherwise, we check all operands of the
6638 expression recursively. */
6640 switch (TREE_CODE (exp))
6643 /* If the operand is static or we are static, we can't conflict.
6644 Likewise if we don't conflict with the operand at all. */
6645 if (staticp (TREE_OPERAND (exp, 0))
6646 || TREE_STATIC (exp)
6647 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6650 /* Otherwise, the only way this can conflict is if we are taking
6651 the address of a DECL whose address is part of X, which is
6652 very rare. */
6653 exp = TREE_OPERAND (exp, 0);
6656 if (!DECL_RTL_SET_P (exp)
6657 || !MEM_P (DECL_RTL (exp)))
6660 exp_rtl = XEXP (DECL_RTL (exp), 0);
6664 case MISALIGNED_INDIRECT_REF:
6667 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6668 get_alias_set (exp)))
6673 /* Assume that the call will clobber all hard registers and
6674 all of memory. */
6675 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6680 case WITH_CLEANUP_EXPR:
6681 case CLEANUP_POINT_EXPR:
6682 /* Lowered by gimplify.c. */
6686 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6692 /* If we have an rtx, we do not need to scan our operands. */
6696 nops = TREE_OPERAND_LENGTH (exp);
6697 for (i = 0; i < nops; i++)
6698 if (TREE_OPERAND (exp, i) != 0
6699 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6705 /* Should never get a type here. */
6709 /* If we have an rtl, find any enclosed object. Then see if we conflict
6710 with it. */
6713 if (GET_CODE (exp_rtl) == SUBREG)
6715 exp_rtl = SUBREG_REG (exp_rtl);
6717 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6721 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6722 are memory and they conflict. */
6723 return ! (rtx_equal_p (x, exp_rtl)
6724 || (MEM_P (x) && MEM_P (exp_rtl)
6725 && true_dependence (exp_rtl, VOIDmode, x,
6726 rtx_addr_varies_p)));
6729 /* If we reach here, it is safe. */
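/* Note: a zero result only ever means "could not prove safety".
   Callers such as expand_operands below react to a failed
   safe_from_p (target, exp1, 1) by clearing TARGET and evaluating into
   fresh temporaries, which is always correct, merely slower.  */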
6734 /* Return the highest power of two that EXP is known to be a multiple of.
6735 This is used in updating alignment of MEMs in array references. */
6737 unsigned HOST_WIDE_INT
6738 highest_pow2_factor (const_tree exp)
6740 unsigned HOST_WIDE_INT c0, c1;
6742 switch (TREE_CODE (exp))
6745 /* We can find the lowest bit that's a one. If the low
6746 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6747 We need to handle this case since we can find it in a COND_EXPR,
6748 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6749 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6750 later ICE. */
6751 if (TREE_OVERFLOW (exp))
6752 return BIGGEST_ALIGNMENT;
6755 /* Note: tree_low_cst is intentionally not used here,
6756 we don't care about the upper bits. */
6757 c0 = TREE_INT_CST_LOW (exp);
6758 c0 &= -c0;
6759 return c0 ? c0 : BIGGEST_ALIGNMENT;
6763 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6764 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6765 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6766 return MIN (c0, c1);
6768 case MULT_EXPR:
6769 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6770 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6771 return c0 * c1;
6773 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6775 if (integer_pow2p (TREE_OPERAND (exp, 1))
6776 && host_integerp (TREE_OPERAND (exp, 1), 1))
6778 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6779 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6780 return MAX (1, c0 / c1);
6785 /* The highest power of two of a bit-and expression is the maximum of
6786 that of its operands. We typically get here for a complex LHS and
6787 a constant negative power of two on the RHS to force an explicit
6788 alignment, so don't bother looking at the LHS. */
6789 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6793 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6796 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6799 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6800 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6801 return MIN (c0, c1);
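/* Illustrative sketch, not part of GCC: the INTEGER_CST case above is
   the classic lowest-set-bit trick, modeled standalone here with plain
   C types and a made-up name.  */
static unsigned long long
lowest_set_bit_sketch (unsigned long long c)
{
  /* 24 -> 8, 80 -> 16, 1 -> 1; the caller maps 0 to BIGGEST_ALIGNMENT.
     Composite expressions combine these, e.g. a * 24 + b * 40 yields
     MIN (8, 8) == 8 through the MULT_EXPR and PLUS_EXPR cases.  */
  return c & -c;
}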
6810 /* Similar, except that the alignment requirements of TARGET are
6811 taken into account. Assume it is at least as aligned as its
6812 type, unless it is a COMPONENT_REF in which case the layout of
6813 the structure gives the alignment. */
6815 static unsigned HOST_WIDE_INT
6816 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6818 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6819 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6821 return MAX (factor, talign);
6824 /* Subroutine of expand_expr. Expand the two operands of a binary
6825 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6826 The value may be stored in TARGET if TARGET is nonzero. The
6827 MODIFIER argument is as documented by expand_expr. */
6830 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6831 enum expand_modifier modifier)
6833 if (! safe_from_p (target, exp1, 1))
6834 target = 0;
6835 if (operand_equal_p (exp0, exp1, 0))
6837 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6838 *op1 = copy_rtx (*op0);
6842 /* If we need to preserve evaluation order, copy exp0 into its own
6843 temporary variable so that it can't be clobbered by exp1. */
6844 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6845 exp0 = save_expr (exp0);
6846 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6847 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
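/* Note: save_expr wraps exp0 in a SAVE_EXPR so it is evaluated exactly
   once and its value reused, pinning the first operand before a
   side-effecting exp1 (say, one containing an embedded assignment)
   gets a chance to clobber it.  */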
6852 /* Return a MEM that contains constant EXP. DEFER is as for
6853 output_constant_def and MODIFIER is as for expand_expr. */
6855 static rtx
6856 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6860 mem = output_constant_def (exp, defer);
6861 if (modifier != EXPAND_INITIALIZER)
6862 mem = use_anchored_address (mem);
6866 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6867 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6869 static rtx
6870 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6871 enum expand_modifier modifier, addr_space_t as)
6873 rtx result, subtarget;
6874 tree inner, offset;
6875 HOST_WIDE_INT bitsize, bitpos;
6876 int volatilep, unsignedp;
6877 enum machine_mode mode1;
6879 /* If we are taking the address of a constant and are at the top level,
6880 we have to use output_constant_def since we can't call force_const_mem
6881 yet. */
6882 /* ??? This should be considered a front-end bug. We should not be
6883 generating ADDR_EXPR of something that isn't an LVALUE. The only
6884 exception here is STRING_CST. */
6885 if (CONSTANT_CLASS_P (exp))
6886 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6888 /* Everything must be something allowed by is_gimple_addressable. */
6889 switch (TREE_CODE (exp))
6892 /* This case will happen via recursion for &a->b. */
6893 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6897 tree tem = TREE_OPERAND (exp, 0);
6898 if (!integer_zerop (TREE_OPERAND (exp, 1)))
6899 tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6901 double_int_to_tree (sizetype, mem_ref_offset (exp)));
6902 return expand_expr (tem, target, tmode, modifier);
6906 /* Expand the initializer like constants above. */
6907 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6910 /* The real part of the complex number is always first, therefore
6911 the address is the same as the address of the parent object. */
6914 inner = TREE_OPERAND (exp, 0);
6918 /* The imaginary part of the complex number is always second.
6919 The expression is therefore always offset by the size of the
6920 scalar type. */
6922 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6923 inner = TREE_OPERAND (exp, 0);
6927 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6928 expand_expr, as that can have various side effects; LABEL_DECLs for
6929 example, may not have their DECL_RTL set yet. Expand the rtl of
6930 CONSTRUCTORs too, which should yield a memory reference for the
6931 constructor's contents. Assume language specific tree nodes can
6932 be expanded in some interesting way. */
6933 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6934 if (DECL_P (exp)
6935 || TREE_CODE (exp) == CONSTRUCTOR
6936 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6938 result = expand_expr (exp, target, tmode,
6939 modifier == EXPAND_INITIALIZER
6940 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6942 /* If the DECL isn't in memory, then the DECL wasn't properly
6943 marked TREE_ADDRESSABLE, which will be either a front-end
6944 or a tree optimizer bug. */
6945 gcc_assert (MEM_P (result));
6946 result = XEXP (result, 0);
6948 /* ??? Is this needed anymore? */
6949 if (DECL_P (exp) && !TREE_USED (exp))
6951 assemble_external (exp);
6952 TREE_USED (exp) = 1;
6955 if (modifier != EXPAND_INITIALIZER
6956 && modifier != EXPAND_CONST_ADDRESS)
6957 result = force_operand (result, target);
6961 /* Pass FALSE as the last argument to get_inner_reference although
6962 we are expanding to RTL. The rationale is that we know how to
6963 handle "aligning nodes" here: we can just bypass them because
6964 they won't change the final object whose address will be returned
6965 (they actually exist only for that purpose). */
6966 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6967 &mode1, &unsignedp, &volatilep, false);
6971 /* We must have made progress. */
6972 gcc_assert (inner != exp);
6974 subtarget = offset || bitpos ? NULL_RTX : target;
6975 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6976 inner alignment, force the inner to be sufficiently aligned. */
6977 if (CONSTANT_CLASS_P (inner)
6978 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6980 inner = copy_node (inner);
6981 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6982 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6983 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6985 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6991 if (modifier != EXPAND_NORMAL)
6992 result = force_operand (result, NULL);
6993 tmp = expand_expr (offset, NULL_RTX, tmode,
6994 modifier == EXPAND_INITIALIZER
6995 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6997 result = convert_memory_address_addr_space (tmode, result, as);
6998 tmp = convert_memory_address_addr_space (tmode, tmp, as);
7000 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7001 result = gen_rtx_PLUS (tmode, result, tmp);
7004 subtarget = bitpos ? NULL_RTX : target;
7005 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7006 1, OPTAB_LIB_WIDEN);
7012 /* Someone beforehand should have rejected taking the address
7013 of such an object. */
7014 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7016 result = plus_constant (result, bitpos / BITS_PER_UNIT);
7017 if (modifier < EXPAND_SUM)
7018 result = force_operand (result, target);
7024 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7025 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7027 static rtx
7028 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7029 enum expand_modifier modifier)
7031 addr_space_t as = ADDR_SPACE_GENERIC;
7032 enum machine_mode address_mode = Pmode;
7033 enum machine_mode pointer_mode = ptr_mode;
7034 enum machine_mode rmode;
7037 /* Target mode of VOIDmode says "whatever's natural". */
7038 if (tmode == VOIDmode)
7039 tmode = TYPE_MODE (TREE_TYPE (exp));
7041 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7043 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7044 address_mode = targetm.addr_space.address_mode (as);
7045 pointer_mode = targetm.addr_space.pointer_mode (as);
7048 /* We can get called with some Weird Things if the user does silliness
7049 like "(short) &a". In that case, convert_memory_address won't do
7050 the right thing, so ignore the given target mode. */
7051 if (tmode != address_mode && tmode != pointer_mode)
7052 tmode = address_mode;
7054 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7055 tmode, modifier, as);
7057 /* Despite expand_expr claims concerning ignoring TMODE when not
7058 strictly convenient, stuff breaks if we don't honor it. Note
7059 that combined with the above, we only do this for pointer modes. */
7060 rmode = GET_MODE (result);
7061 if (rmode == VOIDmode)
7062 rmode = tmode;
7063 if (rmode != tmode)
7064 result = convert_memory_address_addr_space (tmode, result, as);
7069 /* Generate code for computing CONSTRUCTOR EXP.
7070 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7071 is TRUE, instead of creating a temporary variable in memory,
7072 NULL is returned and the caller needs to handle it differently. */
7074 static rtx
7075 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7076 bool avoid_temp_mem)
7078 tree type = TREE_TYPE (exp);
7079 enum machine_mode mode = TYPE_MODE (type);
7081 /* Try to avoid creating a temporary at all. This is possible
7082 if all of the initializer is zero.
7083 FIXME: try to handle all [0..255] initializers we can handle
7084 with memset. */
7085 if (TREE_STATIC (exp)
7086 && !TREE_ADDRESSABLE (exp)
7087 && target != 0 && mode == BLKmode
7088 && all_zeros_p (exp))
7090 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7091 return target;
7094 /* All elts simple constants => refer to a constant in memory. But
7095 if this is a non-BLKmode mode, let it store a field at a time
7096 since that should make a CONST_INT or CONST_DOUBLE when we
7097 fold. Likewise, if we have a target we can use, it is best to
7098 store directly into the target unless the type is large enough
7099 that memcpy will be used. If we are making an initializer and
7100 all operands are constant, put it in memory as well.
7102 FIXME: Avoid trying to fill vector constructors piece-meal.
7103 Output them with output_constant_def below unless we're sure
7104 they're zeros. This should go away when vector initializers
7105 are treated like VECTOR_CST instead of arrays. */
7106 if ((TREE_STATIC (exp)
7107 && ((mode == BLKmode
7108 && ! (target != 0 && safe_from_p (target, exp, 1)))
7109 || TREE_ADDRESSABLE (exp)
7110 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7111 && (! MOVE_BY_PIECES_P
7112 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7114 && ! mostly_zeros_p (exp))))
7115 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7116 && TREE_CONSTANT (exp)))
7121 rtx constructor;
7123 constructor = expand_expr_constant (exp, 1, modifier);
7125 if (modifier != EXPAND_CONST_ADDRESS
7126 && modifier != EXPAND_INITIALIZER
7127 && modifier != EXPAND_SUM)
7128 constructor = validize_mem (constructor);
7130 return constructor;
7133 /* Handle calls that pass values in multiple non-contiguous
7134 locations. The Irix 6 ABI has examples of this. */
7135 if (target == 0 || ! safe_from_p (target, exp, 1)
7136 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7138 if (avoid_temp_mem)
7139 return NULL_RTX;
7141 target
7142 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7143 | (TREE_READONLY (exp)
7144 * TYPE_QUAL_CONST))),
7145 0, TREE_ADDRESSABLE (exp), 1);
7148 store_constructor (exp, target, 0, int_expr_size (exp));
7149 return target;
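/* Worked example: for a BLKmode target initialized from a static
   constructor that is entirely zero, such as { 0, 0, 0 }, the
   all_zeros_p path at the top of this function emits one clear_storage
   block-clear instead of the field-by-field store_constructor walk
   performed here.  */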
7153 /* expand_expr: generate code for computing expression EXP.
7154 An rtx for the computed value is returned. The value is never null.
7155 In the case of a void EXP, const0_rtx is returned.
7157 The value may be stored in TARGET if TARGET is nonzero.
7158 TARGET is just a suggestion; callers must assume that
7159 the rtx returned may not be the same as TARGET.
7161 If TARGET is CONST0_RTX, it means that the value will be ignored.
7163 If TMODE is not VOIDmode, it suggests generating the
7164 result in mode TMODE. But this is done only when convenient.
7165 Otherwise, TMODE is ignored and the value generated in its natural mode.
7166 TMODE is just a suggestion; callers must assume that
7167 the rtx returned may not have mode TMODE.
7169 Note that TARGET may have neither TMODE nor MODE. In that case, it
7170 probably will not be used.
7172 If MODIFIER is EXPAND_SUM then when EXP is an addition
7173 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7174 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7175 products as above, or REG or MEM, or constant.
7176 Ordinarily in such cases we would output mul or add instructions
7177 and then return a pseudo reg containing the sum.
7179 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7180 it also marks a label as absolutely required (it can't be dead).
7181 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7182 This is used for outputting expressions used in initializers.
7184 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7185 with a constant address even if that address is not normally legitimate.
7186 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7188 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7189 a call parameter. Such targets require special care as we haven't yet
7190 marked TARGET so that it's safe from being trashed by libcalls. We
7191 don't want to use TARGET for anything but the final result;
7192 intermediate values must go elsewhere. Additionally, calls to
7193 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7195 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7196 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7197 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7198 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7201 rtx
7202 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7203 enum expand_modifier modifier, rtx *alt_rtl)
7207 /* Handle ERROR_MARK before anybody tries to access its type. */
7208 if (TREE_CODE (exp) == ERROR_MARK
7209 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7211 ret = CONST0_RTX (tmode);
7212 return ret ? ret : const0_rtx;
7215 /* If this is an expression of some kind and it has an associated line
7216 number, then emit the line number before expanding the expression.
7218 We need to save and restore the file and line information so that
7219 errors discovered during expansion are emitted with the right
7220 information. It would be better if the diagnostic routines
7221 used the file/line information embedded in the tree nodes rather
7222 than globals. */
7223 if (cfun && EXPR_HAS_LOCATION (exp))
7225 location_t saved_location = input_location;
7226 location_t saved_curr_loc = get_curr_insn_source_location ();
7227 tree saved_block = get_curr_insn_block ();
7228 input_location = EXPR_LOCATION (exp);
7229 set_curr_insn_source_location (input_location);
7231 /* Record where the insns produced belong. */
7232 set_curr_insn_block (TREE_BLOCK (exp));
7234 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7236 input_location = saved_location;
7237 set_curr_insn_block (saved_block);
7238 set_curr_insn_source_location (saved_curr_loc);
7242 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7248 rtx
7249 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7250 enum expand_modifier modifier)
7252 rtx op0, op1, op2, temp;
7253 tree type;
7254 int unsignedp;
7255 enum machine_mode mode;
7256 enum tree_code code = ops->code;
7257 optab this_optab;
7258 rtx subtarget, original_target;
7259 int ignore;
7260 bool reduce_bit_field;
7261 location_t loc = ops->location;
7262 tree treeop0, treeop1;
7263 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7264 ? reduce_to_bit_field_precision ((expr), \
7265 target, \
7266 type) \
7267 : (expr))
7269 type = ops->type;
7270 mode = TYPE_MODE (type);
7271 unsignedp = TYPE_UNSIGNED (type);
7273 treeop0 = ops->op0;
7274 treeop1 = ops->op1;
7276 /* We should be called only on simple (binary or unary) expressions,
7277 exactly those that are valid in gimple expressions that aren't
7278 GIMPLE_SINGLE_RHS (or invalid). */
7279 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7280 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7281 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7283 ignore = (target == const0_rtx
7284 || ((CONVERT_EXPR_CODE_P (code)
7285 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7286 && TREE_CODE (type) == VOID_TYPE));
7288 /* We should be called only if we need the result. */
7289 gcc_assert (!ignore);
7291 /* An operation in what may be a bit-field type needs the
7292 result to be reduced to the precision of the bit-field type,
7293 which is narrower than that of the type's mode. */
7294 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7295 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7297 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7298 target = 0;
7300 /* Use subtarget as the target for operand 0 of a binary operation. */
7301 subtarget = get_subtarget (target);
7302 original_target = target;
7306 case NON_LVALUE_EXPR:
7309 if (treeop0 == error_mark_node)
7310 return const0_rtx;
7312 if (TREE_CODE (type) == UNION_TYPE)
7314 tree valtype = TREE_TYPE (treeop0);
7316 /* If both input and output are BLKmode, this conversion isn't doing
7317 anything except possibly changing memory attributes. */
7318 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7320 rtx result = expand_expr (treeop0, target, tmode,
7321 modifier);
7323 result = copy_rtx (result);
7324 set_mem_attributes (result, type, 0);
7330 if (TYPE_MODE (type) != BLKmode)
7331 target = gen_reg_rtx (TYPE_MODE (type));
7332 else
7333 target = assign_temp (type, 0, 1, 1);
7337 /* Store data into beginning of memory target. */
7338 store_expr (treeop0,
7339 adjust_address (target, TYPE_MODE (valtype), 0),
7340 modifier == EXPAND_STACK_PARM,
7341 false);
7345 gcc_assert (REG_P (target));
7347 /* Store this field into a union of the proper type. */
7348 store_field (target,
7349 MIN ((int_size_in_bytes (TREE_TYPE
7352 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7353 0, TYPE_MODE (valtype), treeop0,
7357 /* Return the entire union. */
7361 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7363 op0 = expand_expr (treeop0, target, VOIDmode,
7364 modifier);
7366 /* If the signedness of the conversion differs and OP0 is
7367 a promoted SUBREG, clear that indication since we now
7368 have to do the proper extension. */
7369 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7370 && GET_CODE (op0) == SUBREG)
7371 SUBREG_PROMOTED_VAR_P (op0) = 0;
7373 return REDUCE_BIT_FIELD (op0);
7376 op0 = expand_expr (treeop0, NULL_RTX, mode,
7377 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7378 if (GET_MODE (op0) == mode)
7381 /* If OP0 is a constant, just convert it into the proper mode. */
7382 else if (CONSTANT_P (op0))
7384 tree inner_type = TREE_TYPE (treeop0);
7385 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7387 if (modifier == EXPAND_INITIALIZER)
7388 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7389 subreg_lowpart_offset (mode,
7390 inner_mode));
7391 else
7392 op0 = convert_modes (mode, inner_mode, op0,
7393 TYPE_UNSIGNED (inner_type));
7396 else if (modifier == EXPAND_INITIALIZER)
7397 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7399 else if (target == 0)
7400 op0 = convert_to_mode (mode, op0,
7401 TYPE_UNSIGNED (TREE_TYPE
7402 (treeop0)));
7403 else
7405 convert_move (target, op0,
7406 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7410 return REDUCE_BIT_FIELD (op0);
7412 case ADDR_SPACE_CONVERT_EXPR:
7414 tree treeop0_type = TREE_TYPE (treeop0);
7416 addr_space_t as_from;
7418 gcc_assert (POINTER_TYPE_P (type));
7419 gcc_assert (POINTER_TYPE_P (treeop0_type));
7421 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7422 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7424 /* Conversions between pointers to the same address space should
7425 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7426 gcc_assert (as_to != as_from);
7428 /* Ask target code to handle conversion between pointers
7429 to overlapping address spaces. */
7430 if (targetm.addr_space.subset_p (as_to, as_from)
7431 || targetm.addr_space.subset_p (as_from, as_to))
7433 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7434 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7435 gcc_assert (op0);
7436 return op0;
7439 /* For disjoint address spaces, converting anything but
7440 a null pointer invokes undefined behaviour. We simply
7441 always return a null pointer here. */
7442 return CONST0_RTX (mode);
7445 case POINTER_PLUS_EXPR:
7446 /* Even though the sizetype mode and the pointer's mode can be different,
7447 expand is able to handle this correctly and get the correct result out
7448 of the PLUS_EXPR code. */
7449 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7450 if sizetype precision is smaller than pointer precision. */
7451 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7452 treeop1 = fold_convert_loc (loc, type,
7453 fold_convert_loc (loc, ssizetype,
7454 treeop1));
7456 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7457 something else, make sure we add the register to the constant and
7458 then to the other thing. This case can occur during strength
7459 reduction and doing it this way will produce better code if the
7460 frame pointer or argument pointer is eliminated.
7462 fold-const.c will ensure that the constant is always in the inner
7463 PLUS_EXPR, so the only case we need to do anything about is if
7464 sp, ap, or fp is our second argument, in which case we must swap
7465 the innermost first argument and our second argument. */
7467 if (TREE_CODE (treeop0) == PLUS_EXPR
7468 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7469 && TREE_CODE (treeop1) == VAR_DECL
7470 && (DECL_RTL (treeop1) == frame_pointer_rtx
7471 || DECL_RTL (treeop1) == stack_pointer_rtx
7472 || DECL_RTL (treeop1) == arg_pointer_rtx))
7474 tree t = treeop1;
7476 treeop1 = TREE_OPERAND (treeop0, 0);
7477 TREE_OPERAND (treeop0, 0) = t;
7480 /* If the result is to be ptr_mode and we are adding an integer to
7481 something, we might be forming a constant. So try to use
7482 plus_constant. If it produces a sum and we can't accept it,
7483 use force_operand. This allows P = &ARR[const] to generate
7484 efficient code on machines where a SYMBOL_REF is not a valid
7485 address.
7487 If this is an EXPAND_SUM call, always return the sum. */
7488 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7489 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7491 if (modifier == EXPAND_STACK_PARM)
7492 target = 0;
7493 if (TREE_CODE (treeop0) == INTEGER_CST
7494 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7495 && TREE_CONSTANT (treeop1))
7497 rtx constant_part;
7499 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7500 EXPAND_SUM);
7501 /* Use immed_double_const to ensure that the constant is
7502 truncated according to the mode of OP1, then sign extended
7503 to a HOST_WIDE_INT. Using the constant directly can result
7504 in non-canonical RTL in a 64x32 cross compile. */
7505 constant_part
7506 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7507 (HOST_WIDE_INT) 0,
7508 TYPE_MODE (TREE_TYPE (treeop1)));
7509 op1 = plus_constant (op1, INTVAL (constant_part));
7510 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7511 op1 = force_operand (op1, target);
7512 return REDUCE_BIT_FIELD (op1);
7515 else if (TREE_CODE (treeop1) == INTEGER_CST
7516 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7517 && TREE_CONSTANT (treeop0))
7519 rtx constant_part;
7521 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7522 (modifier == EXPAND_INITIALIZER
7523 ? EXPAND_INITIALIZER : EXPAND_SUM));
7524 if (! CONSTANT_P (op0))
7526 op1 = expand_expr (treeop1, NULL_RTX,
7527 VOIDmode, modifier);
7528 /* Return a PLUS if modifier says it's OK. */
7529 if (modifier == EXPAND_SUM
7530 || modifier == EXPAND_INITIALIZER)
7531 return simplify_gen_binary (PLUS, mode, op0, op1);
7534 /* Use immed_double_const to ensure that the constant is
7535 truncated according to the mode of OP1, then sign extended
7536 to a HOST_WIDE_INT. Using the constant directly can result
7537 in non-canonical RTL in a 64x32 cross compile. */
7538 constant_part
7539 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7540 (HOST_WIDE_INT) 0,
7541 TYPE_MODE (TREE_TYPE (treeop0)));
7542 op0 = plus_constant (op0, INTVAL (constant_part));
7543 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7544 op0 = force_operand (op0, target);
7545 return REDUCE_BIT_FIELD (op0);
7549 /* No sense saving up arithmetic to be done
7550 if it's all in the wrong mode to form part of an address.
7551 And force_operand won't know whether to sign-extend or
7552 zero-extend. */
7553 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7554 || mode != ptr_mode)
7556 expand_operands (treeop0, treeop1,
7557 subtarget, &op0, &op1, EXPAND_NORMAL);
7558 if (op0 == const0_rtx)
7559 return op1;
7560 if (op1 == const0_rtx)
7561 return op0;
7562 goto binop2;
7565 expand_operands (treeop0, treeop1,
7566 subtarget, &op0, &op1, modifier);
7567 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7570 /* For initializers, we are allowed to return a MINUS of two
7571 symbolic constants. Here we handle all cases when both operands
7572 are constant. */
7573 /* Handle difference of two symbolic constants,
7574 for the sake of an initializer. */
7575 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7576 && really_constant_p (treeop0)
7577 && really_constant_p (treeop1))
7579 expand_operands (treeop0, treeop1,
7580 NULL_RTX, &op0, &op1, modifier);
7582 /* If the last operand is a CONST_INT, use plus_constant of
7583 the negated constant. Else make the MINUS. */
7584 if (CONST_INT_P (op1))
7585 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7587 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7590 /* No sense saving up arithmetic to be done
7591 if it's all in the wrong mode to form part of an address.
7592 And force_operand won't know whether to sign-extend or
7593 zero-extend. */
7594 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7595 || mode != ptr_mode)
7598 expand_operands (treeop0, treeop1,
7599 subtarget, &op0, &op1, modifier);
7601 /* Convert A - const to A + (-const). */
7602 if (CONST_INT_P (op1))
7604 op1 = negate_rtx (mode, op1);
7605 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
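/* Worked example: with op1 == (const_int 5), A - 5 is rewritten as
   A + (-5), so the PLUS path (including plus_constant folding and
   EXPAND_SUM address formation) covers both signs of constant
   adjustment.  */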
7610 case WIDEN_MULT_PLUS_EXPR:
7611 case WIDEN_MULT_MINUS_EXPR:
7612 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7613 op2 = expand_normal (ops->op2);
7614 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7618 case WIDEN_MULT_EXPR:
7619 /* If first operand is constant, swap them.
7620 Thus the following special case checks need only
7621 check the second operand. */
7622 if (TREE_CODE (treeop0) == INTEGER_CST)
7629 /* First, check if we have a multiplication of one signed and one
7630 unsigned operand. */
7631 if (TREE_CODE (treeop1) != INTEGER_CST
7632 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7633 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7635 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7636 this_optab = usmul_widen_optab;
7637 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7639 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7641 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7642 expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
7645 expand_operands (treeop0, treeop1, subtarget, &op1, &op0,
7651 /* Check for a multiplication with matching signedness. */
7652 else if ((TREE_CODE (treeop1) == INTEGER_CST
7653 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7654 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7655 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7657 tree op0type = TREE_TYPE (treeop0);
7658 enum machine_mode innermode = TYPE_MODE (op0type);
7659 bool zextend_p = TYPE_UNSIGNED (op0type);
7660 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7661 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7663 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7665 if (optab_handler (this_optab, mode) != CODE_FOR_nothing)
7667 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7669 temp = expand_widening_mult (mode, op0, op1, target,
7670 unsignedp, this_optab);
7671 return REDUCE_BIT_FIELD (temp);
7673 if (optab_handler (other_optab, mode) != CODE_FOR_nothing
7674 && innermode == word_mode)
7677 op0 = expand_normal (treeop0);
7678 if (TREE_CODE (treeop1) == INTEGER_CST)
7679 op1 = convert_modes (innermode, mode,
7680 expand_normal (treeop1), unsignedp);
7682 op1 = expand_normal (treeop1);
7683 temp = expand_binop (mode, other_optab, op0, op1, target,
7684 unsignedp, OPTAB_LIB_WIDEN);
7685 hipart = gen_highpart (innermode, temp);
7686 htem = expand_mult_highpart_adjust (innermode, hipart,
7690 emit_move_insn (hipart, htem);
7691 return REDUCE_BIT_FIELD (temp);
7695 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7696 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7697 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7698 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7701 /* If this is a fixed-point operation, then we cannot use the code
7702 below because "expand_mult" doesn't support sat/no-sat fixed-point
7704 if (ALL_FIXED_POINT_MODE_P (mode))
7707 /* If first operand is constant, swap them.
7708 Thus the following special case checks need only
7709 check the second operand. */
7710 if (TREE_CODE (treeop0) == INTEGER_CST)
7717 /* Attempt to return something suitable for generating an
7718 indexed address, for machines that support that. */
7720 if (modifier == EXPAND_SUM && mode == ptr_mode
7721 && host_integerp (treeop1, 0))
7723 tree exp1 = treeop1;
7725 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7729 op0 = force_operand (op0, NULL_RTX);
7731 op0 = copy_to_mode_reg (mode, op0);
7733 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7734 gen_int_mode (tree_low_cst (exp1, 0),
7735 TYPE_MODE (TREE_TYPE (exp1)))));
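/* Sketch with hypothetical RTL: under EXPAND_SUM the fast path above
   may hand back
     (mult (reg 100) (const_int 4))
   unreduced, letting an addressing-mode aware caller fold it into an
   indexed address instead of materializing the product in a register.  */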
7738 if (modifier == EXPAND_STACK_PARM)
7739 target = 0;
7741 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7742 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7744 case TRUNC_DIV_EXPR:
7745 case FLOOR_DIV_EXPR:
7747 case ROUND_DIV_EXPR:
7748 case EXACT_DIV_EXPR:
7749 /* If this is a fixed-point operation, then we cannot use the code
7750 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7752 if (ALL_FIXED_POINT_MODE_P (mode))
7755 if (modifier == EXPAND_STACK_PARM)
7756 target = 0;
7757 /* Possible optimization: compute the dividend with EXPAND_SUM
7758 then, if the divisor is constant, we can optimize the case
7759 where some terms of the dividend have coeffs divisible by it. */
7760 expand_operands (treeop0, treeop1,
7761 subtarget, &op0, &op1, EXPAND_NORMAL);
7762 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7767 case TRUNC_MOD_EXPR:
7768 case FLOOR_MOD_EXPR:
7770 case ROUND_MOD_EXPR:
7771 if (modifier == EXPAND_STACK_PARM)
7772 target = 0;
7773 expand_operands (treeop0, treeop1,
7774 subtarget, &op0, &op1, EXPAND_NORMAL);
7775 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7777 case FIXED_CONVERT_EXPR:
7778 op0 = expand_normal (treeop0);
7779 if (target == 0 || modifier == EXPAND_STACK_PARM)
7780 target = gen_reg_rtx (mode);
7782 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7783 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7784 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7785 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7787 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7790 case FIX_TRUNC_EXPR:
7791 op0 = expand_normal (treeop0);
7792 if (target == 0 || modifier == EXPAND_STACK_PARM)
7793 target = gen_reg_rtx (mode);
7794 expand_fix (target, op0, unsignedp);
7798 op0 = expand_normal (treeop0);
7799 if (target == 0 || modifier == EXPAND_STACK_PARM)
7800 target = gen_reg_rtx (mode);
7801 /* expand_float can't figure out what to do if FROM has VOIDmode.
7802 So give it the correct mode. With -O, cse will optimize this. */
7803 if (GET_MODE (op0) == VOIDmode)
7804 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7806 expand_float (target, op0,
7807 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7811 op0 = expand_expr (treeop0, subtarget,
7812 VOIDmode, EXPAND_NORMAL);
7813 if (modifier == EXPAND_STACK_PARM)
7814 target = 0;
7815 temp = expand_unop (mode,
7816 optab_for_tree_code (NEGATE_EXPR, type,
7817 optab_default),
7818 op0, target, 0);
7819 gcc_assert (temp);
7820 return REDUCE_BIT_FIELD (temp);
7823 op0 = expand_expr (treeop0, subtarget,
7824 VOIDmode, EXPAND_NORMAL);
7825 if (modifier == EXPAND_STACK_PARM)
7826 target = 0;
7828 /* ABS_EXPR is not valid for complex arguments. */
7829 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7830 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7832 /* Unsigned abs is simply the operand. Testing here means we don't
7833 risk generating incorrect code below. */
7834 if (TYPE_UNSIGNED (type))
7837 return expand_abs (mode, op0, target, unsignedp,
7838 safe_from_p (target, treeop0, 1));
7842 target = original_target;
7843 if (target == 0
7844 || modifier == EXPAND_STACK_PARM
7845 || (MEM_P (target) && MEM_VOLATILE_P (target))
7846 || GET_MODE (target) != mode
7847 || (REG_P (target)
7848 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7849 target = gen_reg_rtx (mode);
7850 expand_operands (treeop0, treeop1,
7851 target, &op0, &op1, EXPAND_NORMAL);
7853 /* First try to do it with a special MIN or MAX instruction.
7854 If that does not win, use a conditional jump to select the proper
7855 value. */
7856 this_optab = optab_for_tree_code (code, type, optab_default);
7857 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7862 /* At this point, a MEM target is no longer useful; we will get better
7863 code without it. */
7865 if (! REG_P (target))
7866 target = gen_reg_rtx (mode);
7868 /* If op1 was placed in target, swap op0 and op1. */
7869 if (target != op0 && target == op1)
7870 {
7871 temp = op0;
7872 op0 = op1;
7873 op1 = temp;
7874 }
7876 /* We generate better code and avoid problems with op1 mentioning
7877 target by forcing op1 into a pseudo if it isn't a constant. */
7878 if (! CONSTANT_P (op1))
7879 op1 = force_reg (mode, op1);
7882 enum rtx_code comparison_code;
7883 rtx cmpop1 = op1;
7885 if (code == MAX_EXPR)
7886 comparison_code = unsignedp ? GEU : GE;
7887 else
7888 comparison_code = unsignedp ? LEU : LE;
7890 /* Canonicalize to comparisons against 0. */
7891 if (op1 == const1_rtx)
7893 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7894 or (a != 0 ? a : 1) for unsigned.
7895 For MIN we are safe converting (a <= 1 ? a : 1)
7896 into (a <= 0 ? a : 1) */
7897 cmpop1 = const0_rtx;
7898 if (code == MAX_EXPR)
7899 comparison_code = unsignedp ? NE : GT;
7901 if (op1 == constm1_rtx && !unsignedp)
7903 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7904 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7905 cmpop1 = const0_rtx;
7906 if (code == MIN_EXPR)
7907 comparison_code = LT;
7909 #ifdef HAVE_conditional_move
7910 /* Use a conditional move if possible. */
7911 if (can_conditionally_move_p (mode))
7915 /* ??? Same problem as in expmed.c: emit_conditional_move
7916 forces a stack adjustment via compare_from_rtx, and we
7917 lose the stack adjustment if the sequence we are about
7918 to create is discarded. */
7919 do_pending_stack_adjust ();
7921 start_sequence ();
7923 /* Try to emit the conditional move. */
7924 insn = emit_conditional_move (target, comparison_code,
7925 op0, cmpop1, mode,
7926 op0, op1, mode,
7927 unsignedp);
7929 /* If we could do the conditional move, emit the sequence,
7930 and return. */
7931 if (insn)
7933 rtx seq = get_insns ();
7939 /* Otherwise discard the sequence and fall back to code with
7940 branches. */
7945 emit_move_insn (target, op0);
7947 temp = gen_label_rtx ();
7948 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
7949 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
7952 emit_move_insn (target, op1);
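/* Illustrative sketch, not part of GCC: the compare-and-jump fallback
   just emitted behaves, for a signed MAX_EXPR (comparison_code == GE),
   like this plain C model, where the label plays the role of TEMP.  */
static long long
max_fallback_sketch (long long op0, long long op1)
{
  long long target = op0;
  if (target >= op1)   /* do_compare_rtx_and_jump branches when true */
    goto done;
  target = op1;        /* otherwise op1 is the maximum */
 done:
  return target;
}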
7957 op0 = expand_expr (treeop0, subtarget,
7958 VOIDmode, EXPAND_NORMAL);
7959 if (modifier == EXPAND_STACK_PARM)
7960 target = 0;
7961 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7965 /* ??? Can optimize bitwise operations with one arg constant.
7966 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7967 and (a bitwise1 b) bitwise2 b (etc)
7968 but that is probably not worthwhile. */
7970 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7971 boolean values when we want in all cases to compute both of them. In
7972 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7973 as actual zero-or-1 values and then bitwise anding. In cases where
7974 there cannot be any side effects, better code would be made by
7975 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7976 how to recognize those cases. */
7978 case TRUTH_AND_EXPR:
7979 code = BIT_AND_EXPR;
7984 code = BIT_IOR_EXPR;
7988 case TRUTH_XOR_EXPR:
7989 code = BIT_XOR_EXPR;
7995 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
7996 || (GET_MODE_PRECISION (TYPE_MODE (type))
7997 == TYPE_PRECISION (type)));
8002 /* If this is a fixed-point operation, then we cannot use the code
8003 below because "expand_shift" doesn't support sat/no-sat fixed-point
8005 if (ALL_FIXED_POINT_MODE_P (mode))
8008 if (! safe_from_p (subtarget, treeop1, 1))
8009 subtarget = 0;
8010 if (modifier == EXPAND_STACK_PARM)
8011 target = 0;
8012 op0 = expand_expr (treeop0, subtarget,
8013 VOIDmode, EXPAND_NORMAL);
8014 temp = expand_shift (code, mode, op0, treeop1, target,
8015 unsignedp);
8016 if (code == LSHIFT_EXPR)
8017 temp = REDUCE_BIT_FIELD (temp);
8018 return temp;
8020 /* Could determine the answer when only additive constants differ. Also,
8021 the addition of one can be handled by changing the condition. */
8028 case UNORDERED_EXPR:
8036 temp = do_store_flag (ops,
8037 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8038 tmode != VOIDmode ? tmode : mode);
8039 if (temp)
8040 return temp;
8042 /* Use a compare and a jump for BLKmode comparisons, or for function
8043 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8046 || modifier == EXPAND_STACK_PARM
8047 || ! safe_from_p (target, treeop0, 1)
8048 || ! safe_from_p (target, treeop1, 1)
8049 /* Make sure we don't have a hard reg (such as function's return
8050 value) live across basic blocks, if not optimizing. */
8051 || (!optimize && REG_P (target)
8052 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8053 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8055 emit_move_insn (target, const0_rtx);
8057 op1 = gen_label_rtx ();
8058 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8060 emit_move_insn (target, const1_rtx);
8061 emit_label (op1);
8062 return target;
8065 case TRUTH_NOT_EXPR:
8066 if (modifier == EXPAND_STACK_PARM)
8067 target = 0;
8068 op0 = expand_expr (treeop0, target,
8069 VOIDmode, EXPAND_NORMAL);
8070 /* The parser is careful to generate TRUTH_NOT_EXPR
8071 only with operands that are always zero or one. */
8072 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8073 target, 1, OPTAB_LIB_WIDEN);
8074 gcc_assert (temp);
8075 return temp;
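/* Note: for the canonical 0/1 operands the parser guarantees here,
   XOR with 1 is exactly logical negation: 0 ^ 1 == 1 and 1 ^ 1 == 0,
   which is why xor_optab with const1_rtx suffices.  */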
8078 /* Get the rtx code of the operands. */
8079 op0 = expand_normal (treeop0);
8080 op1 = expand_normal (treeop1);
8082 if (!target)
8083 target = gen_reg_rtx (TYPE_MODE (type));
8085 /* Move the real (op0) and imaginary (op1) parts to their location. */
8086 write_complex_part (target, op0, false);
8087 write_complex_part (target, op1, true);
8091 case WIDEN_SUM_EXPR:
8093 tree oprnd0 = treeop0;
8094 tree oprnd1 = treeop1;
8096 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8097 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8102 case REDUC_MAX_EXPR:
8103 case REDUC_MIN_EXPR:
8104 case REDUC_PLUS_EXPR:
8106 op0 = expand_normal (treeop0);
8107 this_optab = optab_for_tree_code (code, type, optab_default);
8108 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8113 case VEC_EXTRACT_EVEN_EXPR:
8114 case VEC_EXTRACT_ODD_EXPR:
8116 expand_operands (treeop0, treeop1,
8117 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8118 this_optab = optab_for_tree_code (code, type, optab_default);
8119 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8125 case VEC_INTERLEAVE_HIGH_EXPR:
8126 case VEC_INTERLEAVE_LOW_EXPR:
8128 expand_operands (treeop0, treeop1,
8129 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8130 this_optab = optab_for_tree_code (code, type, optab_default);
8131 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8137 case VEC_LSHIFT_EXPR:
8138 case VEC_RSHIFT_EXPR:
8140 target = expand_vec_shift_expr (ops, target);
8144 case VEC_UNPACK_HI_EXPR:
8145 case VEC_UNPACK_LO_EXPR:
8147 op0 = expand_normal (treeop0);
8148 this_optab = optab_for_tree_code (code, type, optab_default);
8149 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8155 case VEC_UNPACK_FLOAT_HI_EXPR:
8156 case VEC_UNPACK_FLOAT_LO_EXPR:
8158 op0 = expand_normal (treeop0);
8159 /* The signedness is determined from the input operand. */
8160 this_optab = optab_for_tree_code (code,
8161 TREE_TYPE (treeop0),
8163 temp = expand_widen_pattern_expr
8164 (ops, op0, NULL_RTX, NULL_RTX,
8165 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8171 case VEC_WIDEN_MULT_HI_EXPR:
8172 case VEC_WIDEN_MULT_LO_EXPR:
8174 tree oprnd0 = treeop0;
8175 tree oprnd1 = treeop1;
8177 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8178 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8180 gcc_assert (target);
8184 case VEC_PACK_TRUNC_EXPR:
8185 case VEC_PACK_SAT_EXPR:
8186 case VEC_PACK_FIX_TRUNC_EXPR:
8187 mode = TYPE_MODE (TREE_TYPE (treeop0));
8194 /* Here to do an ordinary binary operator. */
8195 binop:
8196 expand_operands (treeop0, treeop1,
8197 subtarget, &op0, &op1, EXPAND_NORMAL);
8198 binop2:
8199 this_optab = optab_for_tree_code (code, type, optab_default);
8201 if (modifier == EXPAND_STACK_PARM)
8202 target = 0;
8203 temp = expand_binop (mode, this_optab, op0, op1, target,
8204 unsignedp, OPTAB_LIB_WIDEN);
8205 gcc_assert (temp);
8206 return REDUCE_BIT_FIELD (temp);
8208 #undef REDUCE_BIT_FIELD
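/* Illustrative sketch, not part of GCC: REDUCE_BIT_FIELD ultimately
   narrows a result to the bit-field type's precision (the real
   reduce_to_bit_field_precision is defined elsewhere in this file).
   This standalone model assumes a 64-bit value, 0 < prec < 64, and an
   arithmetic right shift on signed types, as GCC itself relies on.  */
static long long
reduce_precision_sketch (long long val, int prec, int unsignedp)
{
  if (unsignedp)
    /* Mask away everything above the field's precision.  */
    return (long long) ((unsigned long long) val & ((1ULL << prec) - 1));
  /* Sign-extend from bit PREC-1: shift the field to the top, then
     arithmetic-shift it back down.  */
  return (long long) ((unsigned long long) val << (64 - prec)) >> (64 - prec);
}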
8210 rtx
8211 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8212 enum expand_modifier modifier, rtx *alt_rtl)
8214 rtx op0, op1, temp, decl_rtl;
8215 tree type;
8216 int unsignedp;
8217 enum machine_mode mode;
8218 enum tree_code code = TREE_CODE (exp);
8220 rtx subtarget, original_target;
8221 int ignore;
8222 tree context;
8223 bool reduce_bit_field;
8224 location_t loc = EXPR_LOCATION (exp);
8225 struct separate_ops ops;
8226 tree treeop0, treeop1, treeop2;
8227 tree ssa_name = NULL_TREE;
8230 type = TREE_TYPE (exp);
8231 mode = TYPE_MODE (type);
8232 unsignedp = TYPE_UNSIGNED (type);
8234 treeop0 = treeop1 = treeop2 = NULL_TREE;
8235 if (!VL_EXP_CLASS_P (exp))
8236 switch (TREE_CODE_LENGTH (code))
8239 case 3: treeop2 = TREE_OPERAND (exp, 2);
8240 case 2: treeop1 = TREE_OPERAND (exp, 1);
8241 case 1: treeop0 = TREE_OPERAND (exp, 0);
8251 ignore = (target == const0_rtx
8252 || ((CONVERT_EXPR_CODE_P (code)
8253 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8254 && TREE_CODE (type) == VOID_TYPE));
8256 /* An operation in what may be a bit-field type needs the
8257 result to be reduced to the precision of the bit-field type,
8258 which is narrower than that of the type's mode. */
8259 reduce_bit_field = (!ignore
8260 && TREE_CODE (type) == INTEGER_TYPE
8261 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8263 /* If we are going to ignore this result, we need only do something
8264 if there is a side-effect somewhere in the expression. If there
8265 is, short-circuit the most common cases here. Note that we must
8266 not call expand_expr with anything but const0_rtx in case this
8267 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8271 if (! TREE_SIDE_EFFECTS (exp))
8272 return const0_rtx;
8274 /* Ensure we reference a volatile object even if value is ignored, but
8275 don't do this if all we are doing is taking its address. */
8276 if (TREE_THIS_VOLATILE (exp)
8277 && TREE_CODE (exp) != FUNCTION_DECL
8278 && mode != VOIDmode && mode != BLKmode
8279 && modifier != EXPAND_CONST_ADDRESS)
8281 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8283 temp = copy_to_reg (temp);
8287 if (TREE_CODE_CLASS (code) == tcc_unary
8288 || code == COMPONENT_REF || code == INDIRECT_REF)
8289 return expand_expr (treeop0, const0_rtx, VOIDmode,
8292 else if (TREE_CODE_CLASS (code) == tcc_binary
8293 || TREE_CODE_CLASS (code) == tcc_comparison
8294 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8296 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8297 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8300 else if (code == BIT_FIELD_REF)
8302 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8303 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8304 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8311 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8314 /* Use subtarget as the target for operand 0 of a binary operation. */
8315 subtarget = get_subtarget (target);
8316 original_target = target;
8322 tree function = decl_function_context (exp);
8324 temp = label_rtx (exp);
8325 temp = gen_rtx_LABEL_REF (Pmode, temp);
8327 if (function != current_function_decl
8329 LABEL_REF_NONLOCAL_P (temp) = 1;
8331 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8336 /* ??? ivopts calls the expander without any preparation from
8337 out-of-ssa. So fake instructions as if this were an access to the
8338 base variable. This unnecessarily allocates a pseudo; see how we can
8339 reuse it, if partition base vars have it set already. */
8340 if (!currently_expanding_to_rtl)
8341 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8344 g = get_gimple_for_ssa_name (exp);
8346 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8350 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8351 exp = SSA_NAME_VAR (ssa_name);
8352 goto expand_decl_rtl;
8356 /* If a static var's type was incomplete when the decl was written,
8357 but the type is complete now, lay out the decl now. */
8358 if (DECL_SIZE (exp) == 0
8359 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8360 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8361 layout_decl (exp, 0);
8363 /* ... fall through ... */
8367 decl_rtl = DECL_RTL (exp);
8369 gcc_assert (decl_rtl);
8370 decl_rtl = copy_rtx (decl_rtl);
8371 /* Record writes to register variables. */
8372 if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
8373 && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
8375 int i = REGNO (decl_rtl);
8376 int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
8379 SET_HARD_REG_BIT (crtl->asm_clobbers, i);
8385 /* Ensure the variable is marked as used even if it doesn't go through
8386 a parser. If it hasn't been used yet, write out an external
8388 if (! TREE_USED (exp))
8390 assemble_external (exp);
8391 TREE_USED (exp) = 1;
8394 /* Show we haven't gotten RTL for this yet. */
8397 /* Variables inherited from containing functions should have
8398 been lowered by this point. */
8399 context = decl_function_context (exp);
8400 gcc_assert (!context
8401 || context == current_function_decl
8402 || TREE_STATIC (exp)
8403 /* ??? C++ creates functions that are not TREE_STATIC. */
8404 || TREE_CODE (exp) == FUNCTION_DECL);
8406 /* This is the case of an array whose size is to be determined
8407 from its initializer, while the initializer is still being parsed.
8410 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8411 temp = validize_mem (decl_rtl);
8413 /* If DECL_RTL is memory, we are in the normal case; if the
8414 address is not valid, get the address into a register. */
8416 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8419 *alt_rtl = decl_rtl;
8420 decl_rtl = use_anchored_address (decl_rtl);
8421 if (modifier != EXPAND_CONST_ADDRESS
8422 && modifier != EXPAND_SUM
8423 && !memory_address_addr_space_p (DECL_MODE (exp),
8425 MEM_ADDR_SPACE (decl_rtl)))
8426 temp = replace_equiv_address (decl_rtl,
8427 copy_rtx (XEXP (decl_rtl, 0)));
8430 /* If we got something, return it. But first, set the alignment
8431 if the address is a register. */
8434 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8435 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8440 /* If the mode of DECL_RTL does not match that of the decl, it
8441 must be a promoted value. We return a SUBREG of the wanted mode,
8442 but mark it so that we know that it was already extended. */
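/* Sketch of the effect (illustrative): on a target that promotes
   QImode variables into SImode registers, DECL_RTL is (reg:SI N)
   and we return its QImode lowpart SUBREG with
   SUBREG_PROMOTED_VAR_P set, so later code can omit redundant
   extensions.  */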
8443 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8445 enum machine_mode pmode;
8447 /* Get the signedness to be used for this variable. Ensure we get
8448 the same mode we got when the variable was declared. */
8449 if (code == SSA_NAME
8450 && (g = SSA_NAME_DEF_STMT (ssa_name))
8451 && gimple_code (g) == GIMPLE_CALL)
8452 pmode = promote_function_mode (type, mode, &unsignedp,
8454 (TREE_TYPE (gimple_call_fn (g))),
8457 pmode = promote_decl_mode (exp, &unsignedp);
8458 gcc_assert (GET_MODE (decl_rtl) == pmode);
8460 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8461 SUBREG_PROMOTED_VAR_P (temp) = 1;
8462 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8469 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8470 TREE_INT_CST_HIGH (exp), mode);
8476 tree tmp = NULL_TREE;
8477 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8478 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8479 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8480 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8481 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8482 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8483 return const_vector_from_tree (exp);
8484 if (GET_MODE_CLASS (mode) == MODE_INT)
8486 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8488 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8491 tmp = build_constructor_from_list (type,
8492 TREE_VECTOR_CST_ELTS (exp));
8493 return expand_expr (tmp, ignore ? const0_rtx : target,
8498 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8501 /* If optimized, generate immediate CONST_DOUBLE
8502 which will be turned into memory by reload if necessary.
8504 We used to force a register so that loop.c could see it. But
8505 this does not allow gen_* patterns to perform optimizations with
8506 the constants. It also produces two insns in cases like "x = 1.0;".
8507 On most machines, floating-point constants are not permitted in
8508 many insns, so we'd end up copying it to a register in any case.
8510 Now, we do the copying in expand_binop, if appropriate. */
8511 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8512 TYPE_MODE (TREE_TYPE (exp)));
8515 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8516 TYPE_MODE (TREE_TYPE (exp)));
8519 /* Handle evaluating a complex constant in a CONCAT target. */
8520 if (original_target && GET_CODE (original_target) == CONCAT)
8522 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8525 rtarg = XEXP (original_target, 0);
8526 itarg = XEXP (original_target, 1);
8528 /* Move the real and imaginary parts separately. */
8529 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8530 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8533 emit_move_insn (rtarg, op0);
8535 emit_move_insn (itarg, op1);
8537 return original_target;
8540 /* ... fall through ... */
8543 temp = expand_expr_constant (exp, 1, modifier);
8545 /* temp contains a constant address.
8546 On RISC machines where a constant address isn't valid,
8547 make some insns to get that address into a register. */
8548 if (modifier != EXPAND_CONST_ADDRESS
8549 && modifier != EXPAND_INITIALIZER
8550 && modifier != EXPAND_SUM
8551 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8552 MEM_ADDR_SPACE (temp)))
8553 return replace_equiv_address (temp,
8554 copy_rtx (XEXP (temp, 0)));
8560 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8562 if (!SAVE_EXPR_RESOLVED_P (exp))
8564 /* We can indeed still hit this case, typically via builtin
8565 expanders calling save_expr immediately before expanding
8566 something. Assume this means that we only have to deal
8567 with non-BLKmode values. */
8568 gcc_assert (GET_MODE (ret) != BLKmode);
8570 val = build_decl (EXPR_LOCATION (exp),
8571 VAR_DECL, NULL, TREE_TYPE (exp));
8572 DECL_ARTIFICIAL (val) = 1;
8573 DECL_IGNORED_P (val) = 1;
8575 TREE_OPERAND (exp, 0) = treeop0;
8576 SAVE_EXPR_RESOLVED_P (exp) = 1;
8578 if (!CONSTANT_P (ret))
8579 ret = copy_to_reg (ret);
8580 SET_DECL_RTL (val, ret);
8588 /* If we don't need the result, just ensure we evaluate any
8592 unsigned HOST_WIDE_INT idx;
8595 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8596 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8601 return expand_constructor (exp, target, modifier, false);
8603 case MISALIGNED_INDIRECT_REF:
8606 tree exp1 = treeop0;
8607 addr_space_t as = ADDR_SPACE_GENERIC;
8609 if (modifier != EXPAND_WRITE)
8613 t = fold_read_from_constant_string (exp);
8615 return expand_expr (t, target, tmode, modifier);
8618 if (POINTER_TYPE_P (TREE_TYPE (exp1)))
8619 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1)));
8621 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
8622 op0 = memory_address_addr_space (mode, op0, as);
8624 temp = gen_rtx_MEM (mode, op0);
8626 set_mem_attributes (temp, exp, 0);
8627 set_mem_addr_space (temp, as);
8629 /* Resolve the misalignment now, so that we don't have to remember
8630 to resolve it later. Of course, this only works for reads. */
8631 if (code == MISALIGNED_INDIRECT_REF)
8636 gcc_assert (modifier == EXPAND_NORMAL
8637 || modifier == EXPAND_STACK_PARM);
8639 /* The vectorizer should have already checked the mode. */
8640 icode = optab_handler (movmisalign_optab, mode);
8641 gcc_assert (icode != CODE_FOR_nothing);
8643 /* We've already validated the memory, and we're creating a
8644 new pseudo destination. The predicates really can't fail. */
8645 reg = gen_reg_rtx (mode);
8647 /* Nor can the insn generator. */
8648 insn = GEN_FCN (icode) (reg, temp);
8657 case TARGET_MEM_REF:
8659 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8660 struct mem_address addr;
8663 get_address_description (exp, &addr);
8664 op0 = addr_for_mem_ref (&addr, as, true);
8665 op0 = memory_address_addr_space (mode, op0, as);
8666 temp = gen_rtx_MEM (mode, op0);
8667 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
8668 set_mem_addr_space (temp, as);
8669 base = get_base_address (TMR_ORIGINAL (exp));
8671 && (INDIRECT_REF_P (base) || TREE_CODE (base) == MEM_REF)
8673 && TREE_CODE (TMR_BASE (exp)) == SSA_NAME
8674 && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp))))
8676 set_mem_expr (temp, build1 (INDIRECT_REF,
8677 TREE_TYPE (exp), TMR_BASE (exp)));
8678 set_mem_offset (temp, NULL_RTX);
8686 = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8687 enum machine_mode address_mode;
8688 tree base = TREE_OPERAND (exp, 0);
8690 /* Handle expansion of non-aliased memory with non-BLKmode. That
8691 might end up in a register. */
8692 if (TREE_CODE (base) == ADDR_EXPR)
8694 HOST_WIDE_INT offset = mem_ref_offset (exp).low;
8696 base = TREE_OPERAND (base, 0);
8700 base = get_addr_base_and_unit_offset (base, &off);
8704 /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
8705 decl, we must use bitfield operations. */
8707 && !TREE_ADDRESSABLE (base)
8708 && DECL_MODE (base) != BLKmode
8709 && DECL_RTL_SET_P (base)
8710 && !MEM_P (DECL_RTL (base)))
8714 && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
8715 && (GET_MODE_BITSIZE (DECL_MODE (base))
8716 == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
8717 return expand_expr (build1 (VIEW_CONVERT_EXPR,
8718 TREE_TYPE (exp), base),
8719 target, tmode, modifier);
8720 bit_offset = bitsize_int (offset * BITS_PER_UNIT);
8721 bftype = TREE_TYPE (base);
8722 if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
8723 bftype = TREE_TYPE (exp);
8724 return expand_expr (build3 (BIT_FIELD_REF, bftype,
8726 TYPE_SIZE (TREE_TYPE (exp)),
8728 target, tmode, modifier);
8731 address_mode = targetm.addr_space.address_mode (as);
8732 base = TREE_OPERAND (exp, 0);
8733 if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
8734 base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
8735 gimple_assign_rhs1 (def_stmt),
8736 gimple_assign_rhs2 (def_stmt));
8737 op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8738 op0 = convert_memory_address_addr_space (address_mode, op0, as);
8739 if (!integer_zerop (TREE_OPERAND (exp, 1)))
8742 = immed_double_int_const (mem_ref_offset (exp), address_mode);
8743 op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
8745 op0 = memory_address_addr_space (mode, op0, as);
8746 temp = gen_rtx_MEM (mode, op0);
8747 set_mem_attributes (temp, exp, 0);
8748 set_mem_addr_space (temp, as);
8749 if (TREE_THIS_VOLATILE (exp))
8750 MEM_VOLATILE_P (temp) = 1;
8757 tree array = treeop0;
8758 tree index = treeop1;
8760 /* Fold an expression like: "foo"[2].
8761 This is not done in fold so it won't happen inside &.
8762 Don't fold if this is for wide characters since it's too
8763 difficult to do correctly and this is a very rare case. */
8765 if (modifier != EXPAND_CONST_ADDRESS
8766 && modifier != EXPAND_INITIALIZER
8767 && modifier != EXPAND_MEMORY)
8769 tree t = fold_read_from_constant_string (exp);
8772 return expand_expr (t, target, tmode, modifier);
8775 /* If this is a constant index into a constant array,
8776 just get the value from the array. Handle both the cases when
8777 we have an explicit constructor and when our operand is a variable
8778 that was declared const. */
8780 if (modifier != EXPAND_CONST_ADDRESS
8781 && modifier != EXPAND_INITIALIZER
8782 && modifier != EXPAND_MEMORY
8783 && TREE_CODE (array) == CONSTRUCTOR
8784 && ! TREE_SIDE_EFFECTS (array)
8785 && TREE_CODE (index) == INTEGER_CST)
8787 unsigned HOST_WIDE_INT ix;
8790 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8792 if (tree_int_cst_equal (field, index))
8794 if (!TREE_SIDE_EFFECTS (value))
8795 return expand_expr (fold (value), target, tmode, modifier);
8800 else if (optimize >= 1
8801 && modifier != EXPAND_CONST_ADDRESS
8802 && modifier != EXPAND_INITIALIZER
8803 && modifier != EXPAND_MEMORY
8804 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8805 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8806 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8807 && targetm.binds_local_p (array))
8809 if (TREE_CODE (index) == INTEGER_CST)
8811 tree init = DECL_INITIAL (array);
8813 if (TREE_CODE (init) == CONSTRUCTOR)
8815 unsigned HOST_WIDE_INT ix;
8818 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8820 if (tree_int_cst_equal (field, index))
8822 if (TREE_SIDE_EFFECTS (value))
8825 if (TREE_CODE (value) == CONSTRUCTOR)
8827 /* If VALUE is a CONSTRUCTOR, this
8828 optimization is only useful if
8829 this doesn't store the CONSTRUCTOR
8830 into memory. If it does, it is more
8831 efficient to just load the data from
8832 the array directly. */
8833 rtx ret = expand_constructor (value, target,
8835 if (ret == NULL_RTX)
8839 return expand_expr (fold (value), target, tmode,
8843 else if (TREE_CODE (init) == STRING_CST)
8845 tree index1 = index;
8846 tree low_bound = array_ref_low_bound (exp);
8847 index1 = fold_convert_loc (loc, sizetype,
8850 /* Optimize the special case of a zero lower bound.
8852 We convert the low_bound to sizetype to avoid some problems
8853 with constant folding. (E.g. suppose the lower bound is 1,
8854 and its mode is QI. Without the conversion, (ARRAY
8855 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8856 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8858 if (! integer_zerop (low_bound))
8859 index1 = size_diffop_loc (loc, index1,
8860 fold_convert_loc (loc, sizetype,
8863 if (0 > compare_tree_int (index1,
8864 TREE_STRING_LENGTH (init)))
8866 tree type = TREE_TYPE (TREE_TYPE (init));
8867 enum machine_mode mode = TYPE_MODE (type);
8869 if (GET_MODE_CLASS (mode) == MODE_INT
8870 && GET_MODE_SIZE (mode) == 1)
8871 return gen_int_mode (TREE_STRING_POINTER (init)
8872 [TREE_INT_CST_LOW (index1)],
8879 goto normal_inner_ref;
8882 /* If the operand is a CONSTRUCTOR, we can just extract the
8883 appropriate field if it is present. */
8884 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8886 unsigned HOST_WIDE_INT idx;
8889 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8891 if (field == treeop1
8892 /* We can normally use the value of the field in the
8893 CONSTRUCTOR. However, if this is a bitfield in
8894 an integral mode that we can fit in a HOST_WIDE_INT,
8895 we must mask only the number of bits in the bitfield,
8896 since this is done implicitly by the constructor. If
8897 the bitfield does not meet either of those conditions,
8898 we can't do this optimization. */
8899 && (! DECL_BIT_FIELD (field)
8900 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8901 && (GET_MODE_BITSIZE (DECL_MODE (field))
8902 <= HOST_BITS_PER_WIDE_INT))))
8904 if (DECL_BIT_FIELD (field)
8905 && modifier == EXPAND_STACK_PARM)
8907 op0 = expand_expr (value, target, tmode, modifier);
8908 if (DECL_BIT_FIELD (field))
8910 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8911 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8913 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8915 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8916 op0 = expand_and (imode, op0, op1, target);
8921 = build_int_cst (NULL_TREE,
8922 GET_MODE_BITSIZE (imode) - bitsize);
8924 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8926 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8934 goto normal_inner_ref;
8937 case ARRAY_RANGE_REF:
8940 enum machine_mode mode1, mode2;
8941 HOST_WIDE_INT bitsize, bitpos;
8943 int volatilep = 0, must_force_mem;
8944 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8945 &mode1, &unsignedp, &volatilep, true);
8946 rtx orig_op0, memloc;
8948 /* If we got back the original object, something is wrong. Perhaps
8949 we are evaluating an expression too early. In any event, don't
8950 infinitely recurse. */
8951 gcc_assert (tem != exp);
8953 /* If TEM's type is a union of variable size, pass TARGET to the inner
8954 computation, since it will need a temporary and TARGET is known
8955 to suffice. This occurs in unchecked conversion in Ada. */
8958 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8959 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8961 && modifier != EXPAND_STACK_PARM
8962 ? target : NULL_RTX),
8964 (modifier == EXPAND_INITIALIZER
8965 || modifier == EXPAND_CONST_ADDRESS
8966 || modifier == EXPAND_STACK_PARM)
8967 ? modifier : EXPAND_NORMAL);
8970 /* If the bitfield is volatile, we want to access it in the
8971 field's mode, not the computed mode. */
8973 && MEM_P (op0)
8974 && flag_strict_volatile_bitfields > 0)
8975 op0 = adjust_address (op0, mode1, 0);
8978 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
8980 /* If we have either an offset, a BLKmode result, or a reference
8981 outside the underlying object, we must force it to memory.
8982 Such a case can occur in Ada if we have unchecked conversion
8983 of an expression from a scalar type to an aggregate type or
8984 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
8985 passed a partially uninitialized object or a view-conversion
8986 to a larger size. */
8987 must_force_mem = (offset
8989 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
8991 /* Handle CONCAT first. */
8992 if (GET_CODE (op0) == CONCAT && !must_force_mem)
8995 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
8998 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9001 op0 = XEXP (op0, 0);
9002 mode2 = GET_MODE (op0);
9004 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9005 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9009 op0 = XEXP (op0, 1);
9011 mode2 = GET_MODE (op0);
9014 /* Otherwise force into memory. */
9018 /* If this is a constant, put it in a register if it is a legitimate
9019 constant and we don't need a memory reference. */
9020 if (CONSTANT_P (op0)
9022 && LEGITIMATE_CONSTANT_P (op0)
9024 op0 = force_reg (mode2, op0);
9026 /* Otherwise, if this is a constant, try to force it to the constant
9027 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
9028 is a legitimate constant. */
9029 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9030 op0 = validize_mem (memloc);
9032 /* Otherwise, if this is a constant or the object is not in memory
9033 and needs to be, put it there. */
9034 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9036 tree nt = build_qualified_type (TREE_TYPE (tem),
9037 (TYPE_QUALS (TREE_TYPE (tem))
9038 | TYPE_QUAL_CONST));
9039 memloc = assign_temp (nt, 1, 1, 1);
9040 emit_move_insn (memloc, op0);
9046 enum machine_mode address_mode;
9047 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9050 gcc_assert (MEM_P (op0));
9053 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9054 if (GET_MODE (offset_rtx) != address_mode)
9055 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9057 if (GET_MODE (op0) == BLKmode
9058 /* A constant address in OP0 can have VOIDmode; we must
9059 not try to call force_reg in that case. */
9060 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9062 && (bitpos % bitsize) == 0
9063 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9064 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9066 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9070 op0 = offset_address (op0, offset_rtx,
9071 highest_pow2_factor (offset));
9074 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9075 record its alignment as BIGGEST_ALIGNMENT. */
9076 if (MEM_P (op0) && bitpos == 0 && offset != 0
9077 && is_aligning_offset (offset, tem))
9078 set_mem_align (op0, BIGGEST_ALIGNMENT);
9080 /* Don't forget about volatility even if this is a bitfield. */
9081 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9083 if (op0 == orig_op0)
9084 op0 = copy_rtx (op0);
9086 MEM_VOLATILE_P (op0) = 1;
9089 /* In cases where an aligned union has an unaligned object
9090 as a field, we might be extracting a BLKmode value from
9091 an integer-mode (e.g., SImode) object. Handle this case
9092 by doing the extract into an object as wide as the field
9093 (which we know to be the width of a basic mode), then
9094 storing into memory, and changing the mode to BLKmode. */
9095 if (mode1 == VOIDmode
9096 || REG_P (op0) || GET_CODE (op0) == SUBREG
9097 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9098 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9099 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9100 && modifier != EXPAND_CONST_ADDRESS
9101 && modifier != EXPAND_INITIALIZER)
9102 /* If the field is volatile, we always want an aligned
9104 || (volatilep && flag_strict_volatile_bitfields > 0)
9105 /* If the field isn't aligned enough to fetch as a memref,
9106 fetch it as a bit field. */
9107 || (mode1 != BLKmode
9108 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9109 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9111 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9112 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9113 && ((modifier == EXPAND_CONST_ADDRESS
9114 || modifier == EXPAND_INITIALIZER)
9116 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9117 || (bitpos % BITS_PER_UNIT != 0)))
9118 /* If the type and the field are a constant size and the
9119 size of the type isn't the same size as the bitfield,
9120 we must use bitfield operations. */
9122 && TYPE_SIZE (TREE_TYPE (exp))
9123 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9124 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9127 enum machine_mode ext_mode = mode;
9129 if (ext_mode == BLKmode
9130 && ! (target != 0 && MEM_P (op0)
9132 && bitpos % BITS_PER_UNIT == 0))
9133 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9135 if (ext_mode == BLKmode)
9138 target = assign_temp (type, 0, 1, 1);
9143 /* In this case, BITPOS must start at a byte boundary and
9144 TARGET, if specified, must be a MEM. */
9145 gcc_assert (MEM_P (op0)
9146 && (!target || MEM_P (target))
9147 && !(bitpos % BITS_PER_UNIT));
9149 emit_block_move (target,
9150 adjust_address (op0, VOIDmode,
9151 bitpos / BITS_PER_UNIT),
9152 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9154 (modifier == EXPAND_STACK_PARM
9155 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9160 op0 = validize_mem (op0);
9162 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9163 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9165 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9166 (modifier == EXPAND_STACK_PARM
9167 ? NULL_RTX : target),
9168 ext_mode, ext_mode);
9170 /* If the result is a record type and BITSIZE is narrower than
9171 the mode of OP0, an integral mode, and this is a big endian
9172 machine, we must put the field into the high-order bits. */
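/* E.g. (a sketch) an 8-bit field extracted into an SImode register
   on a big-endian target is shifted left by 24 so that it occupies
   the high-order bits, matching the in-memory layout of the
   record.  */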
9173 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9174 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9175 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9176 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9177 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9181 /* If the result type is BLKmode, store the data into a temporary
9182 of the appropriate type, but with the mode corresponding to the
9183 mode for the data we have (op0's mode). It's tempting to make
9184 this a constant type, since we know it's only being stored once,
9185 but that can cause problems if we are taking the address of this
9186 COMPONENT_REF because the MEM of any reference via that address
9187 will have flags corresponding to the type, which will not
9188 necessarily be constant. */
9189 if (mode == BLKmode)
9191 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9194 /* If the reference doesn't use the alias set of its type,
9195 we cannot create the temporary using that type. */
9196 if (component_uses_parent_alias_set (exp))
9198 new_rtx = assign_stack_local (ext_mode, size, 0);
9199 set_mem_alias_set (new_rtx, get_alias_set (exp));
9202 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9204 emit_move_insn (new_rtx, op0);
9205 op0 = copy_rtx (new_rtx);
9206 PUT_MODE (op0, BLKmode);
9207 set_mem_attributes (op0, exp, 1);
9213 /* If the result is BLKmode, use that to access the object
9215 if (mode == BLKmode)
9218 /* Get a reference to just this component. */
9219 if (modifier == EXPAND_CONST_ADDRESS
9220 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9221 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9223 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9225 if (op0 == orig_op0)
9226 op0 = copy_rtx (op0);
9228 set_mem_attributes (op0, exp, 0);
9229 if (REG_P (XEXP (op0, 0)))
9230 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9232 MEM_VOLATILE_P (op0) |= volatilep;
9233 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9234 || modifier == EXPAND_CONST_ADDRESS
9235 || modifier == EXPAND_INITIALIZER)
9237 else if (target == 0)
9238 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9240 convert_move (target, op0, unsignedp);
9245 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9248 /* All valid uses of __builtin_va_arg_pack () are removed during
9250 if (CALL_EXPR_VA_ARG_PACK (exp))
9251 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9253 tree fndecl = get_callee_fndecl (exp), attr;
9256 && (attr = lookup_attribute ("error",
9257 DECL_ATTRIBUTES (fndecl))) != NULL)
9258 error ("%Kcall to %qs declared with attribute error: %s",
9259 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9260 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9262 && (attr = lookup_attribute ("warning",
9263 DECL_ATTRIBUTES (fndecl))) != NULL)
9264 warning_at (tree_nonartificial_location (exp),
9265 0, "%Kcall to %qs declared with attribute warning: %s",
9266 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9267 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9269 /* Check for a built-in function. */
9270 if (fndecl && DECL_BUILT_IN (fndecl))
9272 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9273 return expand_builtin (exp, target, subtarget, tmode, ignore);
9276 return expand_call (exp, target, ignore);
9278 case VIEW_CONVERT_EXPR:
9281 /* If we are converting to BLKmode, try to avoid an intermediate
9282 temporary by fetching an inner memory reference. */
9284 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9285 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9286 && handled_component_p (treeop0))
9288 enum machine_mode mode1;
9289 HOST_WIDE_INT bitsize, bitpos;
9294 = get_inner_reference (treeop0, &bitsize, &bitpos,
9295 &offset, &mode1, &unsignedp, &volatilep,
9299 /* ??? We should work harder and deal with non-zero offsets. */
9301 && (bitpos % BITS_PER_UNIT) == 0
9303 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9305 /* See the normal_inner_ref case for the rationale. */
9308 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9309 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9311 && modifier != EXPAND_STACK_PARM
9312 ? target : NULL_RTX),
9314 (modifier == EXPAND_INITIALIZER
9315 || modifier == EXPAND_CONST_ADDRESS
9316 || modifier == EXPAND_STACK_PARM)
9317 ? modifier : EXPAND_NORMAL);
9319 if (MEM_P (orig_op0))
9323 /* Get a reference to just this component. */
9324 if (modifier == EXPAND_CONST_ADDRESS
9325 || modifier == EXPAND_SUM
9326 || modifier == EXPAND_INITIALIZER)
9327 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9329 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9331 if (op0 == orig_op0)
9332 op0 = copy_rtx (op0);
9334 set_mem_attributes (op0, treeop0, 0);
9335 if (REG_P (XEXP (op0, 0)))
9336 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9338 MEM_VOLATILE_P (op0) |= volatilep;
9344 op0 = expand_expr (treeop0,
9345 NULL_RTX, VOIDmode, modifier);
9347 /* If the input and output modes are both the same, we are done. */
9348 if (mode == GET_MODE (op0))
9350 /* If neither mode is BLKmode, and both modes are the same size
9351 then we can use gen_lowpart. */
9352 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9353 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9354 && !COMPLEX_MODE_P (GET_MODE (op0)))
9356 if (GET_CODE (op0) == SUBREG)
9357 op0 = force_reg (GET_MODE (op0), op0);
9358 op0 = gen_lowpart (mode, op0);
9360 /* If both types are integral, convert from one mode to the other. */
9361 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9362 op0 = convert_modes (mode, GET_MODE (op0), op0,
9363 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9364 /* As a last resort, spill op0 to memory, and reload it in a
9366 else if (!MEM_P (op0))
9368 /* If the operand is not a MEM, force it into memory. Since we
9369 are going to be changing the mode of the MEM, don't call
9370 force_const_mem for constants because we don't allow pool
9371 constants to change mode. */
9372 tree inner_type = TREE_TYPE (treeop0);
9374 gcc_assert (!TREE_ADDRESSABLE (exp));
9376 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9378 = assign_stack_temp_for_type
9379 (TYPE_MODE (inner_type),
9380 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9382 emit_move_insn (target, op0);
9386 /* At this point, OP0 is in the correct mode. If the output type is
9387 such that the operand is known to be aligned, indicate that it is.
9388 Otherwise, we need only be concerned about alignment for non-BLKmode
9392 op0 = copy_rtx (op0);
9394 if (TYPE_ALIGN_OK (type))
9395 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9396 else if (STRICT_ALIGNMENT
9398 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9400 tree inner_type = TREE_TYPE (treeop0);
9401 HOST_WIDE_INT temp_size
9402 = MAX (int_size_in_bytes (inner_type),
9403 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9405 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9406 rtx new_with_op0_mode
9407 = adjust_address (new_rtx, GET_MODE (op0), 0);
9409 gcc_assert (!TREE_ADDRESSABLE (exp));
9411 if (GET_MODE (op0) == BLKmode)
9412 emit_block_move (new_with_op0_mode, op0,
9413 GEN_INT (GET_MODE_SIZE (mode)),
9414 (modifier == EXPAND_STACK_PARM
9415 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9417 emit_move_insn (new_with_op0_mode, op0);
9422 op0 = adjust_address (op0, mode, 0);
9427 /* Use a compare and a jump for BLKmode comparisons, or for function
9428 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9430 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9431 are occasionally created by folding during expansion. */
9432 case TRUTH_ANDIF_EXPR:
9433 case TRUTH_ORIF_EXPR:
9436 || modifier == EXPAND_STACK_PARM
9437 || ! safe_from_p (target, treeop0, 1)
9438 || ! safe_from_p (target, treeop1, 1)
9439 /* Make sure we don't have a hard reg (such as the function's return
9440 value) live across basic blocks, if not optimizing. */
9441 || (!optimize && REG_P (target)
9442 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9443 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9446 emit_move_insn (target, const0_rtx);
9448 op1 = gen_label_rtx ();
9449 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9452 emit_move_insn (target, const1_rtx);
9455 return ignore ? const0_rtx : target;
9457 case STATEMENT_LIST:
9459 tree_stmt_iterator iter;
9461 gcc_assert (ignore);
9463 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9464 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9469 /* A COND_EXPR with its type being VOID_TYPE represents a
9470 conditional jump and is handled in
9471 expand_gimple_cond_expr. */
9472 gcc_assert (!VOID_TYPE_P (type));
9474 /* Note that COND_EXPRs whose type is a structure or union
9475 are required to be constructed to contain assignments of
9476 a temporary variable, so that we can evaluate them here
9477 for side effect only. If type is void, we must do likewise. */
9479 gcc_assert (!TREE_ADDRESSABLE (type)
9481 && TREE_TYPE (treeop1) != void_type_node
9482 && TREE_TYPE (treeop2) != void_type_node);
9484 /* If we are not to produce a result, we have no target. Otherwise,
9485 if a target was specified use it; it will not be used as an
9486 intermediate target unless it is safe. If no target, use a
9489 if (modifier != EXPAND_STACK_PARM
9491 && safe_from_p (original_target, treeop0, 1)
9492 && GET_MODE (original_target) == mode
9493 #ifdef HAVE_conditional_move
9494 && (! can_conditionally_move_p (mode)
9495 || REG_P (original_target))
9497 && !MEM_P (original_target))
9498 temp = original_target;
9500 temp = assign_temp (type, 0, 0, 1);
9502 do_pending_stack_adjust ();
9504 op0 = gen_label_rtx ();
9505 op1 = gen_label_rtx ();
9506 jumpifnot (treeop0, op0, -1);
9507 store_expr (treeop1, temp,
9508 modifier == EXPAND_STACK_PARM,
9511 emit_jump_insn (gen_jump (op1));
9514 store_expr (treeop2, temp,
9515 modifier == EXPAND_STACK_PARM,
9523 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9530 gcc_assert (ignore);
9532 /* Check for |= or &= of a bitfield of size 1 into another bitfield
9533 of size 1. In this case, (unless we need the result of the
9534 assignment) we can do this more efficiently with a
9535 test followed by an assignment, if necessary.
9537 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9538 things change so we do, this code should be enhanced to
9539 support it. */
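/* A sketch of the source pattern this matches (identifiers are
   illustrative only):

     struct s { unsigned f : 1, g : 1; } x;
     x.f |= x.g;

   which is expanded as "if (x.g) x.f = 1;", replacing a
   read-modify-write of x.f's word with a test and a conditional
   store.  */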
9540 if (TREE_CODE (lhs) == COMPONENT_REF
9541 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9542 || TREE_CODE (rhs) == BIT_AND_EXPR)
9543 && TREE_OPERAND (rhs, 0) == lhs
9544 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9545 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9546 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9548 rtx label = gen_label_rtx ();
9549 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9550 do_jump (TREE_OPERAND (rhs, 1),
9552 value ? 0 : label, -1);
9553 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9554 MOVE_NONTEMPORAL (exp));
9555 do_pending_stack_adjust ();
9560 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9565 return expand_expr_addr_expr (exp, target, tmode, modifier);
9568 op0 = expand_normal (treeop0);
9569 return read_complex_part (op0, false);
9572 op0 = expand_normal (treeop0);
9573 return read_complex_part (op0, true);
9580 /* Expanded in cfgexpand.c. */
9583 case TRY_CATCH_EXPR:
9585 case EH_FILTER_EXPR:
9586 case TRY_FINALLY_EXPR:
9587 /* Lowered by tree-eh.c. */
9590 case WITH_CLEANUP_EXPR:
9591 case CLEANUP_POINT_EXPR:
9593 case CASE_LABEL_EXPR:
9599 case PREINCREMENT_EXPR:
9600 case PREDECREMENT_EXPR:
9601 case POSTINCREMENT_EXPR:
9602 case POSTDECREMENT_EXPR:
9605 /* Lowered by gimplify.c. */
9609 /* Function descriptors are not valid except as
9610 initialization constants, and should not be expanded. */
9613 case WITH_SIZE_EXPR:
9614 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9615 have pulled out the size to use in whatever context it needed. */
9616 return expand_expr_real (treeop0, original_target, tmode,
9619 case REALIGN_LOAD_EXPR:
9621 tree oprnd0 = treeop0;
9622 tree oprnd1 = treeop1;
9623 tree oprnd2 = treeop2;
9626 this_optab = optab_for_tree_code (code, type, optab_default);
9627 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9628 op2 = expand_normal (oprnd2);
9629 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9637 tree oprnd0 = treeop0;
9638 tree oprnd1 = treeop1;
9639 tree oprnd2 = treeop2;
9642 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9643 op2 = expand_normal (oprnd2);
9644 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9649 case COMPOUND_LITERAL_EXPR:
9651 /* Initialize the anonymous variable declared in the compound
9652 literal, then return the variable. */
9653 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9655 /* Create RTL for this variable. */
9656 if (!DECL_RTL_SET_P (decl))
9658 if (DECL_HARD_REGISTER (decl))
9659 /* The user specified an assembler name for this variable.
9661 rest_of_decl_compilation (decl, 0, 0);
9666 return expand_expr_real (decl, original_target, tmode,
9671 return expand_expr_real_2 (&ops, target, tmode, modifier);
9675 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9676 signedness of TYPE), possibly returning the result in TARGET. */
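/* For example (a sketch): reducing an SImode value to a 3-bit
   unsigned type computes EXP & 7, while the signed case computes
   (EXP << 29) >> 29 with arithmetic shifts, so that bit 2 of the
   field is sign-extended through the upper bits.  */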
9678 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9680 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9681 if (target && GET_MODE (target) != GET_MODE (exp))
9683 /* For constant values, reduce using build_int_cst_type. */
9684 if (CONST_INT_P (exp))
9686 HOST_WIDE_INT value = INTVAL (exp);
9687 tree t = build_int_cst_type (type, value);
9688 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9690 else if (TYPE_UNSIGNED (type))
9692 rtx mask = immed_double_int_const (double_int_mask (prec),
9694 return expand_and (GET_MODE (exp), exp, mask, target);
9698 tree count = build_int_cst (NULL_TREE,
9699 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9700 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9701 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9705 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9706 when applied to the address of EXP produces an address known to be
9707 aligned more than BIGGEST_ALIGNMENT. */
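/* A sketch of the pattern recognized (the C rendering is
   illustrative):

     (- (intptr_t) &EXP) & (ALIGN - 1)

   where ALIGN is a power of 2 and ALIGN - 1 exceeds
   BIGGEST_ALIGNMENT in bytes; adding this offset to the address of
   EXP rounds it up to an ALIGN-byte boundary.  */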
9710 is_aligning_offset (const_tree offset, const_tree exp)
9712 /* Strip off any conversions. */
9713 while (CONVERT_EXPR_P (offset))
9714 offset = TREE_OPERAND (offset, 0);
9716 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9717 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9718 if (TREE_CODE (offset) != BIT_AND_EXPR
9719 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9720 || compare_tree_int (TREE_OPERAND (offset, 1),
9721 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9722 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9725 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9726 It must be NEGATE_EXPR. Then strip any more conversions. */
9727 offset = TREE_OPERAND (offset, 0);
9728 while (CONVERT_EXPR_P (offset))
9729 offset = TREE_OPERAND (offset, 0);
9731 if (TREE_CODE (offset) != NEGATE_EXPR)
9734 offset = TREE_OPERAND (offset, 0);
9735 while (CONVERT_EXPR_P (offset))
9736 offset = TREE_OPERAND (offset, 0);
9738 /* This must now be the address of EXP. */
9739 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9742 /* Return the tree node if ARG corresponds to a string constant or zero
9743 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9744 in bytes within the string that ARG is accessing. The type of the
9745 offset will be `sizetype'. */
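/* For instance (a sketch): for an argument equivalent to
   &"hello"[2], this returns the STRING_CST "hello" and sets
   *PTR_OFFSET to (sizetype) 2.  */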
9748 string_constant (tree arg, tree *ptr_offset)
9750 tree array, offset, lower_bound;
9753 if (TREE_CODE (arg) == ADDR_EXPR)
9755 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9757 *ptr_offset = size_zero_node;
9758 return TREE_OPERAND (arg, 0);
9760 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9762 array = TREE_OPERAND (arg, 0);
9763 offset = size_zero_node;
9765 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9767 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9768 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9769 if (TREE_CODE (array) != STRING_CST
9770 && TREE_CODE (array) != VAR_DECL)
9773 /* Check if the array has a nonzero lower bound. */
9774 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9775 if (!integer_zerop (lower_bound))
9777 /* If the offset and base aren't both constants, return 0. */
9778 if (TREE_CODE (lower_bound) != INTEGER_CST)
9780 if (TREE_CODE (offset) != INTEGER_CST)
9782 /* Adjust offset by the lower bound. */
9783 offset = size_diffop (fold_convert (sizetype, offset),
9784 fold_convert (sizetype, lower_bound));
9790 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9792 tree arg0 = TREE_OPERAND (arg, 0);
9793 tree arg1 = TREE_OPERAND (arg, 1);
9798 if (TREE_CODE (arg0) == ADDR_EXPR
9799 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9800 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9802 array = TREE_OPERAND (arg0, 0);
9805 else if (TREE_CODE (arg1) == ADDR_EXPR
9806 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9807 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9809 array = TREE_OPERAND (arg1, 0);
9818 if (TREE_CODE (array) == STRING_CST)
9820 *ptr_offset = fold_convert (sizetype, offset);
9823 else if (TREE_CODE (array) == VAR_DECL)
9827 /* Variables initialized to string literals can be handled too. */
9828 if (DECL_INITIAL (array) == NULL_TREE
9829 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9832 /* Only handle them if they are read-only, non-volatile and bind locally. */
9833 if (! TREE_READONLY (array)
9834 || TREE_SIDE_EFFECTS (array)
9835 || ! targetm.binds_local_p (array))
9838 /* Avoid const char foo[4] = "abcde"; */
9839 if (DECL_SIZE_UNIT (array) == NULL_TREE
9840 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9841 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9842 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9845 /* If the variable is bigger than the string literal, OFFSET must be constant
9846 and within the bounds of the string literal. */
9847 offset = fold_convert (sizetype, offset);
9848 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9849 && (! host_integerp (offset, 1)
9850 || compare_tree_int (offset, length) >= 0))
9853 *ptr_offset = offset;
9854 return DECL_INITIAL (array);
9860 /* Generate code to calculate OPS, an exploded expression,
9861 using a store-flag instruction and return an rtx for the result.
9862 OPS reflects a comparison.
9864 If TARGET is nonzero, store the result there if convenient.
9866 Return zero if there is no suitable set-flag instruction
9867 available on this machine.
9869 Once expand_expr has been called on the arguments of the comparison,
9870 we are committed to doing the store flag, since it is not safe to
9871 re-evaluate the expression. We emit the store-flag insn by calling
9872 emit_store_flag, but only expand the arguments if we have a reason
9873 to believe that emit_store_flag will be successful. If we think that
9874 it will, but it isn't, we have to simulate the store-flag with a
9875 set/jump/set sequence. */
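/* E.g. (a sketch) for "r = (a < b)" on a machine with an SImode
   cstore pattern, emit_store_flag produces roughly

     (set (reg:SI r) (lt:SI (reg:SI a) (reg:SI b)))

   and no branch sequence is needed.  */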
9878 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9881 tree arg0, arg1, type;
9883 enum machine_mode operand_mode;
9886 rtx subtarget = target;
9887 location_t loc = ops->location;
9892 /* Don't crash if the comparison was erroneous. */
9893 if (arg0 == error_mark_node || arg1 == error_mark_node)
9896 type = TREE_TYPE (arg0);
9897 operand_mode = TYPE_MODE (type);
9898 unsignedp = TYPE_UNSIGNED (type);
9900 /* We won't bother with BLKmode store-flag operations because it would mean
9901 passing a lot of information to emit_store_flag. */
9902 if (operand_mode == BLKmode)
9905 /* We won't bother with store-flag operations involving function pointers
9906 when function pointers must be canonicalized before comparisons. */
9907 #ifdef HAVE_canonicalize_funcptr_for_compare
9908 if (HAVE_canonicalize_funcptr_for_compare
9909 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9910 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9912 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9913 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9914 == FUNCTION_TYPE))))
9921 /* Get the rtx comparison code to use. We know that EXP is a comparison
9922 operation of some type. Some comparisons against 1 and -1 can be
9923 converted to comparisons with zero. Do so here so that the tests
9924 below will be aware that we have a comparison with zero. These
9925 tests will not catch constants in the first operand, but constants
9926 are rarely passed as the first operand. */
9937 if (integer_onep (arg1))
9938 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9940 code = unsignedp ? LTU : LT;
9943 if (! unsignedp && integer_all_onesp (arg1))
9944 arg1 = integer_zero_node, code = LT;
9946 code = unsignedp ? LEU : LE;
9949 if (! unsignedp && integer_all_onesp (arg1))
9950 arg1 = integer_zero_node, code = GE;
9952 code = unsignedp ? GTU : GT;
9955 if (integer_onep (arg1))
9956 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9958 code = unsignedp ? GEU : GE;
9961 case UNORDERED_EXPR:
9990 /* Put a constant second. */
9991 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9992 || TREE_CODE (arg0) == FIXED_CST)
9994 tem = arg0; arg0 = arg1; arg1 = tem;
9995 code = swap_condition (code);
9998 /* If this is an equality or inequality test of a single bit, we can
9999 do this by shifting the bit being tested to the low-order bit and
10000 masking the result with the constant 1. If the condition was EQ,
10001 we xor it with 1. This does not require an scc insn and is faster
10002 than an scc insn even if we have it.
10004 The code to make this transformation was moved into fold_single_bit_test,
10005 so we just call into the folder and expand its result. */
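/* E.g. (a sketch) "(x & 4) != 0" is rewritten as "(x >> 2) & 1",
   and "(x & 4) == 0" as "((x >> 2) & 1) ^ 1".  */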
10007 if ((code == NE || code == EQ)
10008 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10009 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10011 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10012 return expand_expr (fold_single_bit_test (loc,
10013 code == NE ? NE_EXPR : EQ_EXPR,
10015 target, VOIDmode, EXPAND_NORMAL);
10018 if (! get_subtarget (target)
10019 || GET_MODE (subtarget) != operand_mode)
10022 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10025 target = gen_reg_rtx (mode);
10027 /* Try a cstore if possible. */
10028 return emit_store_flag_force (target, code, op0, op1,
10029 operand_mode, unsignedp, 1);
10033 /* Stubs in case we haven't got a casesi insn. */
10034 #ifndef HAVE_casesi
10035 # define HAVE_casesi 0
10036 # define gen_casesi(a, b, c, d, e) (0)
10037 # define CODE_FOR_casesi CODE_FOR_nothing
10040 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10041 0 otherwise (i.e. if there is no casesi instruction). */
10043 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10044 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10045 rtx fallback_label ATTRIBUTE_UNUSED)
10047 enum machine_mode index_mode = SImode;
10048 int index_bits = GET_MODE_BITSIZE (index_mode);
10049 rtx op1, op2, index;
10050 enum machine_mode op_mode;
10055 /* Convert the index to SImode. */
10056 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10058 enum machine_mode omode = TYPE_MODE (index_type);
10059 rtx rangertx = expand_normal (range);
10061 /* We must handle the endpoints in the original mode. */
10062 index_expr = build2 (MINUS_EXPR, index_type,
10063 index_expr, minval);
10064 minval = integer_zero_node;
10065 index = expand_normal (index_expr);
10067 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10068 omode, 1, default_label);
10069 /* Now we can safely truncate. */
10070 index = convert_to_mode (index_mode, index, 0);
10074 if (TYPE_MODE (index_type) != index_mode)
10076 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10077 index_expr = fold_convert (index_type, index_expr);
10080 index = expand_normal (index_expr);
10083 do_pending_stack_adjust ();
10085 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10086 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10088 index = copy_to_mode_reg (op_mode, index);
10090 op1 = expand_normal (minval);
10092 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10093 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10094 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10095 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10097 op1 = copy_to_mode_reg (op_mode, op1);
10099 op2 = expand_normal (range);
10101 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10102 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10103 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10104 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10106 op2 = copy_to_mode_reg (op_mode, op2);
10108 emit_jump_insn (gen_casesi (index, op1, op2,
10109 table_label, !default_label
10110 ? fallback_label : default_label));
10114 /* Attempt to generate a tablejump instruction; same concept. */
10115 #ifndef HAVE_tablejump
10116 #define HAVE_tablejump 0
10117 #define gen_tablejump(x, y) (0)
10120 /* Subroutine of the next function.
10122 INDEX is the value being switched on, with the lowest value
10123 in the table already subtracted.
10124 MODE is its expected mode (needed if INDEX is constant).
10125 RANGE is the length of the jump table.
10126 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10128 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10129 index value is out of range. */
10132 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10137 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10138 cfun->cfg->max_jumptable_ents = INTVAL (range);
10140 /* Do an unsigned comparison (in the proper mode) between the index
10141 expression and the value which represents the length of the range.
10142 Since we just finished subtracting the lower bound of the range
10143 from the index expression, this comparison allows us to simultaneously
10144 check that the original index expression value is both greater than
10145 or equal to the minimum value of the range and less than or equal to
10146 the maximum value of the range. */
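/* A sketch in source form (illustrative): for case values 3..10,
   INDEX holds i - 3 and the single unsigned test

     if ((unsigned) (i - 3) > 7) goto default_label;

   rejects both i < 3 and i > 10 at once.  */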
10149 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10152 /* If index is in range, it must fit in Pmode.
10153 Convert to Pmode so we can index with it. */
10155 index = convert_to_mode (Pmode, index, 1);
10157 /* Don't let a MEM slip through, because then INDEX that comes
10158 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10159 and break_out_memory_refs will go to work on it and mess it up. */
10160 #ifdef PIC_CASE_VECTOR_ADDRESS
10161 if (flag_pic && !REG_P (index))
10162 index = copy_to_mode_reg (Pmode, index);
10165 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10166 GET_MODE_SIZE, because this indicates how large insns are. The other
10167 uses should all be Pmode, because they are addresses. This code
10168 could fail if addresses and insns are not the same size. */
10169 index = gen_rtx_PLUS (Pmode,
10170 gen_rtx_MULT (Pmode, index,
10171 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10172 gen_rtx_LABEL_REF (Pmode, table_label));
10173 #ifdef PIC_CASE_VECTOR_ADDRESS
10175 index = PIC_CASE_VECTOR_ADDRESS (index);
10178 index = memory_address (CASE_VECTOR_MODE, index);
10179 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10180 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10181 convert_move (temp, vector, 0);
10183 emit_jump_insn (gen_tablejump (temp, table_label));
10185 /* If we are generating PIC code or if the table is PC-relative, the
10186 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10187 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10192 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10193 rtx table_label, rtx default_label)
10197 if (! HAVE_tablejump)
10200 index_expr = fold_build2 (MINUS_EXPR, index_type,
10201 fold_convert (index_type, index_expr),
10202 fold_convert (index_type, minval));
10203 index = expand_normal (index_expr);
10204 do_pending_stack_adjust ();
10206 do_tablejump (index, TYPE_MODE (index_type),
10207 convert_modes (TYPE_MODE (index_type),
10208 TYPE_MODE (TREE_TYPE (range)),
10209 expand_normal (range),
10210 TYPE_UNSIGNED (TREE_TYPE (range))),
10211 table_label, default_label);
10215 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10217 const_vector_from_tree (tree exp)
10222 enum machine_mode inner, mode;
10224 mode = TYPE_MODE (TREE_TYPE (exp));
10226 if (initializer_zerop (exp))
10227 return CONST0_RTX (mode);
10229 units = GET_MODE_NUNITS (mode);
10230 inner = GET_MODE_INNER (mode);
10232 v = rtvec_alloc (units);
10234 link = TREE_VECTOR_CST_ELTS (exp);
10235 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10237 elt = TREE_VALUE (link);
10239 if (TREE_CODE (elt) == REAL_CST)
10240 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10242 else if (TREE_CODE (elt) == FIXED_CST)
10243 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10246 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10250 /* Initialize remaining elements to 0. */
10251 for (; i < units; ++i)
10252 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10254 return gen_rtx_CONST_VECTOR (mode, v);
10258 /* Build a decl for an EH personality function named NAME. */
10261 build_personality_function (const char *name)
10265 type = build_function_type_list (integer_type_node, integer_type_node,
10266 long_long_unsigned_type_node,
10267 ptr_type_node, ptr_type_node, NULL_TREE);
10268 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10269 get_identifier (name), type);
10270 DECL_ARTIFICIAL (decl) = 1;
10271 DECL_EXTERNAL (decl) = 1;
10272 TREE_PUBLIC (decl) = 1;
10274 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10275 are the flags assigned by targetm.encode_section_info. */
10276 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10281 /* Extracts the personality function of DECL and returns the corresponding
10285 get_personality_function (tree decl)
10287 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10288 enum eh_personality_kind pk;
10290 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10291 if (pk == eh_personality_none)
10295 && pk == eh_personality_any)
10296 personality = lang_hooks.eh_personality ();
10298 if (pk == eh_personality_lang)
10299 gcc_assert (personality != NULL_TREE);
10301 return XEXP (DECL_RTL (personality), 0);
10304 #include "gt-expr.h"