/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "diagnostic.h"
#include "ssaexpand.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
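
/* Illustrative note (not in the original source): on a target where
   the stack grows downward, the two macro blocks above combine so that
   a push of X is expanded as a store through a pre-decremented stack
   pointer, conceptually

       *--sp = x;     STACK_PUSH_CODE == PRE_DEC

   while PRE_INC gives *++sp = x.  PUSH_ARGS_REVERSED is defined only
   when exactly one of STACK_GROWS_DOWNWARD and ARGS_GROW_DOWNWARD is
   defined, i.e. when the stack and the argument area grow in opposite
   directions.  */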

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};

static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
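
/* Illustrative note (not in the original source): each *_BY_PIECES_P
   heuristic asks "would copying or clearing SIZE bytes piecewise take
   fewer move insns than the target's ratio threshold?".  For example,
   assuming a hypothetical target with MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 4,

       MOVE_BY_PIECES_P (16, 64)
	 == (move_by_pieces_ninsns (16, 64, 9) < 4)
	 == (2 < 4)      -- two 8-byte moves --
	 == true

   so a 16-byte, 64-bit-aligned copy would be expanded inline rather
   than emitted as a memcpy call.  */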

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat, mem, mem1, reg;
  enum machine_mode mode;
  int num_clobbers;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
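
/* Illustrative note (not in the original source): after this pass,
   float_extend_from_mem[DFmode][SFmode] is true exactly when the
   target's SFmode-to-DFmode extension pattern accepts a memory
   operand, i.e. when

       (set (reg:DF r) (float_extend:DF (mem:SF ...)))

   can be emitted directly instead of first loading the SFmode value
   into a register.  */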

/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */
      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
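
/* Illustrative note (not in the original source): a typical use of
   convert_move is widening a value into a fresh pseudo, e.g.

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src_si, 1);    zero-extend SImode -> DImode

   which emits a single zero_extend insn when the target has one, and
   otherwise falls back to word-sized pieces or a shift sequence, as
   the cases above show.  */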

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      double_int val = uhwi_to_double_int (INTVAL (x));

      /* We need to zero extend VAL.  */
      if (oldmode != VOIDmode)
	val = double_int_zext (val, GET_MODE_BITSIZE (oldmode));

      return immed_double_int_const (val, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to an
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
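
/* Illustrative arithmetic (not in the original source): on a
   hypothetical target with MOVE_MAX_PIECES == 8 and a 64-bit
   HOST_WIDE_INT, STORE_MAX_PIECES == MIN (8, 2 * 8) == 8, so constant
   stores are emitted at most eight bytes at a time.  */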

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
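
/* Illustrative usage (not in the original source): callers typically
   guard an inline expansion with this predicate, e.g.

       if (can_move_by_pieces (INTVAL (size), align))
	 move_by_pieces (dest, src, INTVAL (size), align, 0);

   and fall back to a movmem pattern or a memcpy libcall otherwise.  */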

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  enum machine_mode to_addr_mode, from_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr_mode = VOIDmode;
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode,
					     plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode,
					   plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_to_mode_reg (to_addr_mode,
						 plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }

  return data.to;
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
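
/* Illustrative worked example (not in the original source): assuming
   a hypothetical target whose widest piece is 4 bytes and which can
   use SImode, HImode and QImode at 32-bit alignment,

       move_by_pieces_ninsns (11, 32, 5)

   counts 11 bytes as 2 SImode moves (8 bytes) + 1 HImode move
   (2 bytes) + 1 QImode move (1 byte) == 4 insns, matching the
   largest-piece-first loop above.  */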

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (align >= BITS_PER_UNIT);

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
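
/* Illustrative usage (not in the original source): expanding a simple
   structure assignment might do

       emit_block_move (dst_mem, src_mem, GEN_INT (32), BLOCK_OP_NORMAL);

   which copies 32 bytes by pieces, via a movmem pattern, or via a
   memcpy libcall, whichever the heuristics above pick.  */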

/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
     depend on its argument.  */
  (void) fn;
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = targetm.calls.function_arg (&args_so_far, mode,
					      NULL_TREE, true);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	targetm.calls.function_arg_advance (&args_so_far, mode,
					    NULL_TREE, true);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align
						 / BITS_PER_UNIT),
					GEN_INT (expected_size));

	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode x_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
  enum machine_mode y_addr_mode
    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);

  if (x_addr_mode != y_addr_mode)
    tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
  y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);

  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
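
/* Illustrative note (not in the original source): the RTL emitted by
   the loop above corresponds to the C fragment

       for (i = 0; i < size; i++)
	 ((char *) x)[i] = ((char *) y)[i];

   with the comparison placed at the bottom (we jump to cmp_label
   first), so the body is skipped entirely for a zero-length copy.  */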

/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
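
/* Illustrative note (not in the original source): a group PARALLEL
   describing a value split across two registers looks like

       (parallel [(expr_list (reg:DI 3) (const_int 0))
		  (expr_list (reg:DI 4) (const_int 8))])

   i.e. each element pairs a register with its byte offset within the
   whole value; gen_group_rtx simply replaces each register with a
   fresh pseudo of the same mode.  */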

/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode && m != BLKmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}

/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}

/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}

/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = XALLOCAVEC (rtx, finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      unsigned int adj_bytelen = bytelen;
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	adj_bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + adj_bytelen
	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      enum machine_mode dest_mode = GET_MODE (dest);
	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);

	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));

	      if (GET_MODE_ALIGNMENT (dest_mode)
		  >= GET_MODE_ALIGNMENT (tmp_mode))
		{
		  dest = assign_stack_temp (dest_mode,
					    GET_MODE_SIZE (dest_mode),
					    0);
		  emit_move_insn (adjust_address (dest,
						  tmp_mode,
						  bytepos),
				  tmps[i]);
		  dst = dest;
		}
	      else
		{
		  dest = assign_stack_temp (tmp_mode,
					    GET_MODE_SIZE (tmp_mode),
					    0);
		  emit_move_insn (dest, tmps[i]);
		  dst = adjust_address (dest, dest_mode, bytepos);
		}
	      break;
	    }
	}
2088 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2090 /* store_bit_field always takes its value from the lsb.
2091 Move the fragment to the lsb if it's not already there. */
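/* For example, if SSIZE is 5 and a 4-byte fragment sits at BYTEPOS 4,
only one byte of it is valid, so the shift below is
(4 - 1) * BITS_PER_UNIT = 24 bits. */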
2093 #ifdef BLOCK_REG_PADDING
2094 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2095 == (BYTES_BIG_ENDIAN ? upward : downward)
2101 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2102 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2103 build_int_cst (NULL_TREE, shift),
2106 bytelen = adj_bytelen;
2109 /* Optimize the access just a bit. */
2111 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2112 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2113 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2114 && bytelen == GET_MODE_SIZE (mode))
2115 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2117 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2121 /* Copy from the pseudo into the (probable) hard reg. */
2122 if (orig_dst != dst)
2123 emit_move_insn (orig_dst, dst);
2126 /* Generate code to copy a BLKmode object of TYPE out of a
2127 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2128 is null, a stack temporary is created. TGTBLK is returned.
2130 The purpose of this routine is to handle functions that return
2131 BLKmode structures in registers. Some machines (the PA for example)
2132 want to return all small structures in registers regardless of the
2133 structure's alignment. */
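/* For example, the PA may return a 5-byte structure in registers;
this routine spills those register words into TGTBLK so that the
value can then be treated as an ordinary BLKmode object. */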
2136 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2138 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2139 rtx src = NULL, dst = NULL;
2140 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2141 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2142 enum machine_mode copy_mode;
2146 tgtblk = assign_temp (build_qualified_type (type,
2148 | TYPE_QUAL_CONST)),
2150 preserve_temp_slots (tgtblk);
2153 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2154 into a new pseudo which is a full word. */
2156 if (GET_MODE (srcreg) != BLKmode
2157 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2158 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2160 /* If the structure doesn't take up a whole number of words, see whether
2161 SRCREG is padded on the left or on the right. If it's on the left,
2162 set PADDING_CORRECTION to the number of bits to skip.
2164 In most ABIs, the structure will be returned at the least significant end of

2165 the register, which translates to right padding on little-endian
2166 targets and left padding on big-endian targets. The opposite
2167 holds if the structure is returned at the most significant
2168 end of the register. */
2169 if (bytes % UNITS_PER_WORD != 0
2170 && (targetm.calls.return_in_msb (type)
2172 : BYTES_BIG_ENDIAN))
2174 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
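/* For example, with 32-bit words a 5-byte structure occupies one byte
of its second word, so PADDING_CORRECTION is 32 - 8 = 24 bits when
the value is padded on the left. */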
2176 /* Copy the structure BITSIZE bits at a time. If the target lives in
2177 memory, take care of not reading/writing past its end by selecting
2178 a copy mode suited to BITSIZE. This should always be possible given
2181 We could probably emit more efficient code for machines which do not use
2182 strict alignment, but it doesn't seem worth the effort at the current
2185 copy_mode = word_mode;
2188 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2189 if (mem_mode != BLKmode)
2190 copy_mode = mem_mode;
2193 for (bitpos = 0, xbitpos = padding_correction;
2194 bitpos < bytes * BITS_PER_UNIT;
2195 bitpos += bitsize, xbitpos += bitsize)
2197 /* We need a new source operand each time xbitpos is on a
2198 word boundary and when xbitpos == padding_correction
2199 (the first time through). */
2200 if (xbitpos % BITS_PER_WORD == 0
2201 || xbitpos == padding_correction)
2202 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2205 /* We need a new destination operand each time bitpos is on
2207 if (bitpos % BITS_PER_WORD == 0)
2208 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2210 /* Use xbitpos for the source extraction (right justified) and
2211 bitpos for the destination store (left justified). */
2212 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2213 extract_bit_field (src, bitsize,
2214 xbitpos % BITS_PER_WORD, 1,
2215 NULL_RTX, copy_mode, copy_mode));
2221 /* Add a USE expression for REG to the (possibly empty) list pointed
2222 to by CALL_FUSAGE. REG must denote a hard register. */
2225 use_reg (rtx *call_fusage, rtx reg)
2227 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2230 = gen_rtx_EXPR_LIST (VOIDmode,
2231 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2234 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2235 starting at REGNO. All of these registers must be hard registers. */
2238 use_regs (rtx *call_fusage, int regno, int nregs)
2242 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2244 for (i = 0; i < nregs; i++)
2245 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2248 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2249 PARALLEL REGS. This is for calls that pass values in multiple
2250 non-contiguous locations. The Irix 6 ABI has examples of this. */
2253 use_group_regs (rtx *call_fusage, rtx regs)
2257 for (i = 0; i < XVECLEN (regs, 0); i++)
2259 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2261 /* A NULL entry means the parameter goes both on the stack and in
2262 registers. This can also be a MEM for targets that pass values
2263 partially on the stack and partially in registers. */
2264 if (reg != 0 && REG_P (reg))
2265 use_reg (call_fusage, reg);
2269 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2270 assignment and the code of the expression on the RHS is CODE. Return
2274 get_def_for_expr (tree name, enum tree_code code)
2278 if (TREE_CODE (name) != SSA_NAME)
2281 def_stmt = get_gimple_for_ssa_name (name);
2283 || gimple_assign_rhs_code (def_stmt) != code)
2290 /* Determine whether the LEN bytes generated by CONSTFUN can be
2291 stored to memory using several move instructions. CONSTFUNDATA is
2292 a pointer which will be passed as argument in every CONSTFUN call.
2293 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2294 a memset operation and false if it's a copy of a constant string.
2295 Return nonzero if a call to store_by_pieces should succeed. */
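/* A memset-style CONSTFUN typically returns the fill byte replicated
across MODE for any OFFSET, while a string-copy CONSTFUN returns the
MODE-sized chunk of the string at OFFSET. */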
2298 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2299 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2300 void *constfundata, unsigned int align, bool memsetp)
2302 unsigned HOST_WIDE_INT l;
2303 unsigned int max_size;
2304 HOST_WIDE_INT offset = 0;
2305 enum machine_mode mode, tmode;
2306 enum insn_code icode;
2314 ? SET_BY_PIECES_P (len, align)
2315 : STORE_BY_PIECES_P (len, align)))
2318 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2319 if (align >= GET_MODE_ALIGNMENT (tmode))
2320 align = GET_MODE_ALIGNMENT (tmode);
2323 enum machine_mode xmode;
2325 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2327 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2328 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2329 || SLOW_UNALIGNED_ACCESS (tmode, align))
2332 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2335 /* We would first store what we can in the largest integer mode, then go to
2336 successively smaller modes. */
2339 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2344 max_size = STORE_MAX_PIECES + 1;
2345 while (max_size > 1)
2347 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2348 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2349 if (GET_MODE_SIZE (tmode) < max_size)
2352 if (mode == VOIDmode)
2355 icode = optab_handler (mov_optab, mode)->insn_code;
2356 if (icode != CODE_FOR_nothing
2357 && align >= GET_MODE_ALIGNMENT (mode))
2359 unsigned int size = GET_MODE_SIZE (mode);
2366 cst = (*constfun) (constfundata, offset, mode);
2367 if (!LEGITIMATE_CONSTANT_P (cst))
2377 max_size = GET_MODE_SIZE (mode);
2380 /* The code above should have handled everything. */
2387 /* Generate several move instructions to store LEN bytes generated by
2388 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2389 pointer which will be passed as argument in every CONSTFUN call.
2390 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2391 a memset operation and false if it's a copy of a constant string.
2392 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2393 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2397 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2398 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2399 void *constfundata, unsigned int align, bool memsetp, int endp)
2401 enum machine_mode to_addr_mode
2402 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2403 struct store_by_pieces_d data;
2407 gcc_assert (endp != 2);
2412 ? SET_BY_PIECES_P (len, align)
2413 : STORE_BY_PIECES_P (len, align));
2414 data.constfun = constfun;
2415 data.constfundata = constfundata;
2418 store_by_pieces_1 (&data, align);
2423 gcc_assert (!data.reverse);
2428 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2429 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2431 data.to_addr = copy_to_mode_reg (to_addr_mode,
2432 plus_constant (data.to_addr,
2435 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2442 to1 = adjust_address (data.to, QImode, data.offset);
2450 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2451 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2454 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2456 struct store_by_pieces_d data;
2461 data.constfun = clear_by_pieces_1;
2462 data.constfundata = NULL;
2465 store_by_pieces_1 (&data, align);
2468 /* Callback routine for clear_by_pieces.
2469 Return const0_rtx unconditionally. */
2472 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2473 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2474 enum machine_mode mode ATTRIBUTE_UNUSED)
2479 /* Subroutine of clear_by_pieces and store_by_pieces.
2480 Generate several move instructions to store LEN bytes of block TO. (A MEM
2481 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2484 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2485 unsigned int align ATTRIBUTE_UNUSED)
2487 enum machine_mode to_addr_mode
2488 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2489 rtx to_addr = XEXP (data->to, 0);
2490 unsigned int max_size = STORE_MAX_PIECES + 1;
2491 enum machine_mode mode = VOIDmode, tmode;
2492 enum insn_code icode;
2495 data->to_addr = to_addr;
2497 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2498 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2500 data->explicit_inc_to = 0;
2502 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2504 data->offset = data->len;
2506 /* If storing requires more than two move insns,
2507 copy addresses to registers (to make displacements shorter)
2508 and use post-increment if available. */
2509 if (!data->autinc_to
2510 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2512 /* Determine the main mode we'll be using. */
2513 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2514 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2515 if (GET_MODE_SIZE (tmode) < max_size)
2518 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2520 data->to_addr = copy_to_mode_reg (to_addr_mode,
2521 plus_constant (to_addr, data->len));
2522 data->autinc_to = 1;
2523 data->explicit_inc_to = -1;
2526 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2527 && ! data->autinc_to)
2529 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2530 data->autinc_to = 1;
2531 data->explicit_inc_to = 1;
2534 if (!data->autinc_to && CONSTANT_P (to_addr))
2535 data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2538 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2539 if (align >= GET_MODE_ALIGNMENT (tmode))
2540 align = GET_MODE_ALIGNMENT (tmode);
2543 enum machine_mode xmode;
2545 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2547 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2548 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2549 || SLOW_UNALIGNED_ACCESS (tmode, align))
2552 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2555 /* First store what we can in the largest integer mode, then go to
2556 successively smaller modes. */
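/* For example, a 7-byte store on a 32-bit target is typically done as
one SImode, one HImode and one QImode store. */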
2558 while (max_size > 1)
2560 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2561 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2562 if (GET_MODE_SIZE (tmode) < max_size)
2565 if (mode == VOIDmode)
2568 icode = optab_handler (mov_optab, mode)->insn_code;
2569 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2570 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2572 max_size = GET_MODE_SIZE (mode);
2575 /* The code above should have handled everything. */
2576 gcc_assert (!data->len);
2579 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2580 with move instructions for mode MODE. GENFUN is the gen_... function
2581 to make a move insn for that mode. DATA has all the other info. */
2584 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2585 struct store_by_pieces_d *data)
2587 unsigned int size = GET_MODE_SIZE (mode);
2590 while (data->len >= size)
2593 data->offset -= size;
2595 if (data->autinc_to)
2596 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2599 to1 = adjust_address (data->to, mode, data->offset);
2601 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2602 emit_insn (gen_add2_insn (data->to_addr,
2603 GEN_INT (-(HOST_WIDE_INT) size)));
2605 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2606 emit_insn ((*genfun) (to1, cst));
2608 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2609 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2611 if (! data->reverse)
2612 data->offset += size;
2618 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2619 its length in bytes. */
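/* EXPECTED_ALIGN and EXPECTED_SIZE are optimization hints (e.g. from
profile feedback) passed through to the setmem expander; clear_storage
below passes 0 and -1, meaning no hint. */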
2622 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2623 unsigned int expected_align, HOST_WIDE_INT expected_size)
2625 enum machine_mode mode = GET_MODE (object);
2628 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2630 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2631 just move a zero. Otherwise, do this a piece at a time. */
2633 && CONST_INT_P (size)
2634 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2636 rtx zero = CONST0_RTX (mode);
2639 emit_move_insn (object, zero);
2643 if (COMPLEX_MODE_P (mode))
2645 zero = CONST0_RTX (GET_MODE_INNER (mode));
2648 write_complex_part (object, zero, 0);
2649 write_complex_part (object, zero, 1);
2655 if (size == const0_rtx)
2658 align = MEM_ALIGN (object);
2660 if (CONST_INT_P (size)
2661 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2662 clear_by_pieces (object, INTVAL (size), align);
2663 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2664 expected_align, expected_size))
2666 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2667 return set_storage_via_libcall (object, size, const0_rtx,
2668 method == BLOCK_OP_TAILCALL);
2676 clear_storage (rtx object, rtx size, enum block_op_methods method)
2678 return clear_storage_hints (object, size, method, 0, -1);
2682 /* A subroutine of clear_storage. Expand a call to memset.
2683 Return the return value of memset, 0 otherwise. */
2686 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2688 tree call_expr, fn, object_tree, size_tree, val_tree;
2689 enum machine_mode size_mode;
2692 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2693 place those new pseudos into a VAR_DECL and use them later. */
2695 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2697 size_mode = TYPE_MODE (sizetype);
2698 size = convert_to_mode (size_mode, size, 1);
2699 size = copy_to_mode_reg (size_mode, size);
2701 /* It is incorrect to use the libcall calling conventions to call
2702 memset in this context. This could be a user call to memset and
2703 the user may wish to examine the return value from memset. For
2704 targets where libcalls and normal calls have different conventions
2705 for returning pointers, we could end up generating incorrect code. */
2707 object_tree = make_tree (ptr_type_node, object);
2708 if (!CONST_INT_P (val))
2709 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2710 size_tree = make_tree (sizetype, size);
2711 val_tree = make_tree (integer_type_node, val);
2713 fn = clear_storage_libcall_fn (true);
2714 call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2715 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2717 retval = expand_normal (call_expr);
2722 /* A subroutine of set_storage_via_libcall. Create the tree node
2723 for the function we use for block clears. The first time FOR_CALL
2724 is true, we call assemble_external. */
2726 tree block_clear_fn;
2729 init_block_clear_fn (const char *asmspec)
2731 if (!block_clear_fn)
2735 fn = get_identifier ("memset");
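/* Build a declaration matching void *memset (void *, int, size_t). */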
2736 args = build_function_type_list (ptr_type_node, ptr_type_node,
2737 integer_type_node, sizetype,
2740 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2741 DECL_EXTERNAL (fn) = 1;
2742 TREE_PUBLIC (fn) = 1;
2743 DECL_ARTIFICIAL (fn) = 1;
2744 TREE_NOTHROW (fn) = 1;
2745 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2746 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2748 block_clear_fn = fn;
2752 set_user_assembler_name (block_clear_fn, asmspec);
2756 clear_storage_libcall_fn (int for_call)
2758 static bool emitted_extern;
2760 if (!block_clear_fn)
2761 init_block_clear_fn (NULL);
2763 if (for_call && !emitted_extern)
2765 emitted_extern = true;
2766 make_decl_rtl (block_clear_fn);
2767 assemble_external (block_clear_fn);
2770 return block_clear_fn;
2773 /* Expand a setmem pattern; return true if successful. */
2776 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2777 unsigned int expected_align, HOST_WIDE_INT expected_size)
2779 /* Try the most limited insn first, because there's no point
2780 including more than one in the machine description unless
2781 the more limited one has some advantage. */
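/* Walk the integer modes from narrowest to widest, looking for a
setmem pattern whose size operand can accommodate SIZE. */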
2783 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2784 enum machine_mode mode;
2786 if (expected_align < align)
2787 expected_align = align;
2789 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2790 mode = GET_MODE_WIDER_MODE (mode))
2792 enum insn_code code = setmem_optab[(int) mode];
2793 insn_operand_predicate_fn pred;
2795 if (code != CODE_FOR_nothing
2796 /* We don't need MODE to be narrower than
2797 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2798 the mode mask, as it is returned by the macro, it will
2799 definitely be less than the actual mode mask. */
2800 && ((CONST_INT_P (size)
2801 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2802 <= (GET_MODE_MASK (mode) >> 1)))
2803 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2804 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2805 || (*pred) (object, BLKmode))
2806 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2807 || (*pred) (opalign, VOIDmode)))
2810 enum machine_mode char_mode;
2811 rtx last = get_last_insn ();
2814 opsize = convert_to_mode (mode, size, 1);
2815 pred = insn_data[(int) code].operand[1].predicate;
2816 if (pred != 0 && ! (*pred) (opsize, mode))
2817 opsize = copy_to_mode_reg (mode, opsize);
2820 char_mode = insn_data[(int) code].operand[2].mode;
2821 if (char_mode != VOIDmode)
2823 opchar = convert_to_mode (char_mode, opchar, 1);
2824 pred = insn_data[(int) code].operand[2].predicate;
2825 if (pred != 0 && ! (*pred) (opchar, char_mode))
2826 opchar = copy_to_mode_reg (char_mode, opchar);
2829 if (insn_data[(int) code].n_operands == 4)
2830 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2832 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2833 GEN_INT (expected_align
2835 GEN_INT (expected_size));
2842 delete_insns_since (last);
2850 /* Write to one of the components of the complex value CPLX. Write VAL to
2851 the real part if IMAG_P is false, and the imaginary part if it is true. */
2854 write_complex_part (rtx cplx, rtx val, bool imag_p)
2856 enum machine_mode cmode;
2857 enum machine_mode imode;
2860 if (GET_CODE (cplx) == CONCAT)
2862 emit_move_insn (XEXP (cplx, imag_p), val);
2866 cmode = GET_MODE (cplx);
2867 imode = GET_MODE_INNER (cmode);
2868 ibitsize = GET_MODE_BITSIZE (imode);
2870 /* For MEMs simplify_gen_subreg may generate an invalid new address
2871 because, e.g., the original address is considered mode-dependent
2872 by the target, which prevents simplify_subreg from invoking
2873 adjust_address_nv. Instead of preparing fallback support for an
2874 invalid address, we call adjust_address_nv directly. */
2877 emit_move_insn (adjust_address_nv (cplx, imode,
2878 imag_p ? GET_MODE_SIZE (imode) : 0),
2883 /* If the sub-object is at least word sized, then we know that subregging
2884 will work. This special case is important, since store_bit_field
2885 wants to operate on integer modes, and there's rarely an OImode to
2886 correspond to TCmode. */
2887 if (ibitsize >= BITS_PER_WORD
2888 /* For hard regs we have exact predicates. Assume we can split
2889 the original object if it spans an even number of hard regs.
2890 This special case is important for SCmode on 64-bit platforms
2891 where the natural size of floating-point regs is 32-bit. */
2893 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2894 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2896 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2897 imag_p ? GET_MODE_SIZE (imode) : 0);
2900 emit_move_insn (part, val);
2904 /* simplify_gen_subreg may fail for sub-word MEMs. */
2905 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2908 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2911 /* Extract one of the components of the complex value CPLX. Extract the
2912 real part if IMAG_P is false, and the imaginary part if it's true. */
2915 read_complex_part (rtx cplx, bool imag_p)
2917 enum machine_mode cmode, imode;
2920 if (GET_CODE (cplx) == CONCAT)
2921 return XEXP (cplx, imag_p);
2923 cmode = GET_MODE (cplx);
2924 imode = GET_MODE_INNER (cmode);
2925 ibitsize = GET_MODE_BITSIZE (imode);
2927 /* Special case reads from complex constants that got spilled to memory. */
2928 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2930 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2931 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2933 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2934 if (CONSTANT_CLASS_P (part))
2935 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2939 /* For MEMs simplify_gen_subreg may generate an invalid new address
2940 because, e.g., the original address is considered mode-dependent
2941 by the target, which prevents simplify_subreg from invoking
2942 adjust_address_nv. Instead of preparing fallback support for an
2943 invalid address, we call adjust_address_nv directly. */
2945 return adjust_address_nv (cplx, imode,
2946 imag_p ? GET_MODE_SIZE (imode) : 0);
2948 /* If the sub-object is at least word sized, then we know that subregging
2949 will work. This special case is important, since extract_bit_field
2950 wants to operate on integer modes, and there's rarely an OImode to
2951 correspond to TCmode. */
2952 if (ibitsize >= BITS_PER_WORD
2953 /* For hard regs we have exact predicates. Assume we can split
2954 the original object if it spans an even number of hard regs.
2955 This special case is important for SCmode on 64-bit platforms
2956 where the natural size of floating-point regs is 32-bit. */
2958 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2959 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2961 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2962 imag_p ? GET_MODE_SIZE (imode) : 0);
2966 /* simplify_gen_subreg may fail for sub-word MEMs. */
2967 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2970 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2971 true, NULL_RTX, imode, imode);
2974 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2975 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2976 represented in NEW_MODE. If FORCE is true, this will never happen, as
2977 we'll force-create a SUBREG if needed. */
2980 emit_move_change_mode (enum machine_mode new_mode,
2981 enum machine_mode old_mode, rtx x, bool force)
2985 if (push_operand (x, GET_MODE (x)))
2987 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2988 MEM_COPY_ATTRIBUTES (ret, x);
2992 /* We don't have to worry about changing the address since the
2993 size in bytes is supposed to be the same. */
2994 if (reload_in_progress)
2996 /* Copy the MEM to change the mode and move any
2997 substitutions from the old MEM to the new one. */
2998 ret = adjust_address_nv (x, new_mode, 0);
2999 copy_replacements (x, ret);
3002 ret = adjust_address (x, new_mode, 0);
3006 /* Note that we do want simplify_subreg's behavior of validating
3007 that the new mode is ok for a hard register. If we were to use
3008 simplify_gen_subreg, we would create the subreg, but would
3009 probably run into the target not being able to implement it. */
3010 /* Except, of course, when FORCE is true, when this is exactly what
3011 we want. Which is needed for CCmodes on some targets. */
3013 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3015 ret = simplify_subreg (new_mode, x, old_mode, 0);
3021 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3022 an integer mode of the same size as MODE. Returns the instruction
3023 emitted, or NULL if such a move could not be generated. */
3026 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3028 enum machine_mode imode;
3029 enum insn_code code;
3031 /* There must exist a mode of the exact size we require. */
3032 imode = int_mode_for_mode (mode);
3033 if (imode == BLKmode)
3036 /* The target must support moves in this mode. */
3037 code = optab_handler (mov_optab, imode)->insn_code;
3038 if (code == CODE_FOR_nothing)
3041 x = emit_move_change_mode (imode, mode, x, force);
3044 y = emit_move_change_mode (imode, mode, y, force);
3047 return emit_insn (GEN_FCN (code) (x, y));
3050 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3051 Return an equivalent MEM that does not use an auto-increment. */
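/* E.g., (mem:SI (pre_dec (reg sp))) becomes an explicit stack-pointer
adjustment (4 bytes, subject to PUSH_ROUNDING) followed by
(mem:SI (reg sp)). */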
3054 emit_move_resolve_push (enum machine_mode mode, rtx x)
3056 enum rtx_code code = GET_CODE (XEXP (x, 0));
3057 HOST_WIDE_INT adjust;
3060 adjust = GET_MODE_SIZE (mode);
3061 #ifdef PUSH_ROUNDING
3062 adjust = PUSH_ROUNDING (adjust);
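/* E.g., a port that always pushes in 4-byte units would round a
2-byte HImode push up to 4 bytes here. */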
3064 if (code == PRE_DEC || code == POST_DEC)
3066 else if (code == PRE_MODIFY || code == POST_MODIFY)
3068 rtx expr = XEXP (XEXP (x, 0), 1);
3071 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3072 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3073 val = INTVAL (XEXP (expr, 1));
3074 if (GET_CODE (expr) == MINUS)
3076 gcc_assert (adjust == val || adjust == -val);
3080 /* Do not use anti_adjust_stack, since we don't want to update
3081 stack_pointer_delta. */
3082 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3083 GEN_INT (adjust), stack_pointer_rtx,
3084 0, OPTAB_LIB_WIDEN);
3085 if (temp != stack_pointer_rtx)
3086 emit_move_insn (stack_pointer_rtx, temp);
3093 temp = stack_pointer_rtx;
3098 temp = plus_constant (stack_pointer_rtx, -adjust);
3104 return replace_equiv_address (x, temp);
3107 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3108 X is known to satisfy push_operand, and MODE is known to be complex.
3109 Returns the last instruction emitted. */
3112 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3114 enum machine_mode submode = GET_MODE_INNER (mode);
3117 #ifdef PUSH_ROUNDING
3118 unsigned int submodesize = GET_MODE_SIZE (submode);
3120 /* If we are pushing to the stack but the size is not one the
3121 machine can push exactly, we need to use move instructions. */
3122 if (PUSH_ROUNDING (submodesize) != submodesize)
3124 x = emit_move_resolve_push (mode, x);
3125 return emit_move_insn (x, y);
3129 /* Note that the real part always precedes the imag part in memory
3130 regardless of the machine's endianness. */
3131 switch (GET_CODE (XEXP (x, 0)))
3145 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3146 read_complex_part (y, imag_first));
3147 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3148 read_complex_part (y, !imag_first));
3151 /* A subroutine of emit_move_complex. Perform the move from Y to X
3152 via two moves of the parts. Returns the last instruction emitted. */
3155 emit_move_complex_parts (rtx x, rtx y)
3157 /* Show the output dies here. This is necessary for SUBREGs
3158 of pseudos since we cannot track their lifetimes correctly;
3159 hard regs shouldn't appear here except as return values. */
3160 if (!reload_completed && !reload_in_progress
3161 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3164 write_complex_part (x, read_complex_part (y, false), false);
3165 write_complex_part (x, read_complex_part (y, true), true);
3167 return get_last_insn ();
3170 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3171 MODE is known to be complex. Returns the last instruction emitted. */
3174 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3178 /* Need to take special care for pushes, to maintain proper ordering
3179 of the data, and possibly extra padding. */
3180 if (push_operand (x, mode))
3181 return emit_move_complex_push (mode, x, y);
3183 /* See if we can coerce the target into moving both values at once. */
3185 /* Move floating point as parts. */
3186 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3187 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3189 /* Not possible if the values are inherently not adjacent. */
3190 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3192 /* Is possible if both are registers (or subregs of registers). */
3193 else if (register_operand (x, mode) && register_operand (y, mode))
3195 /* If one of the operands is a memory, and alignment constraints
3196 are friendly enough, we may be able to do combined memory operations.
3197 We do not attempt this if Y is a constant because that combination is
3198 usually better with the by-parts thing below. */
3199 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3200 && (!STRICT_ALIGNMENT
3201 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3210 /* For memory to memory moves, optimal behavior can be had with the
3211 existing block move logic. */
3212 if (MEM_P (x) && MEM_P (y))
3214 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3215 BLOCK_OP_NO_LIBCALL);
3216 return get_last_insn ();
3219 ret = emit_move_via_integer (mode, x, y, true);
3224 return emit_move_complex_parts (x, y);
3227 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3228 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3231 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3235 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3238 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3239 if (code != CODE_FOR_nothing)
3241 x = emit_move_change_mode (CCmode, mode, x, true);
3242 y = emit_move_change_mode (CCmode, mode, y, true);
3243 return emit_insn (GEN_FCN (code) (x, y));
3247 /* Otherwise, find the MODE_INT mode of the same width. */
3248 ret = emit_move_via_integer (mode, x, y, false);
3249 gcc_assert (ret != NULL);
3253 /* Return true if word I of OP lies entirely in the
3254 undefined bits of a paradoxical subreg. */
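/* E.g., on a little-endian 64-bit target, word 1 of the paradoxical
(subreg:TI (reg:DI N) 0) lies entirely in undefined bits. */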
3257 undefined_operand_subword_p (const_rtx op, int i)
3259 enum machine_mode innermode, innermostmode;
3261 if (GET_CODE (op) != SUBREG)
3263 innermode = GET_MODE (op);
3264 innermostmode = GET_MODE (SUBREG_REG (op));
3265 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3266 /* The SUBREG_BYTE represents offset, as if the value were stored in
3267 memory, except for a paradoxical subreg where we define
3268 SUBREG_BYTE to be 0; undo this exception as in
3270 if (SUBREG_BYTE (op) == 0
3271 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3273 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3274 if (WORDS_BIG_ENDIAN)
3275 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3276 if (BYTES_BIG_ENDIAN)
3277 offset += difference % UNITS_PER_WORD;
3279 if (offset >= GET_MODE_SIZE (innermostmode)
3280 || offset <= -GET_MODE_SIZE (word_mode))
3285 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3286 MODE is any multi-word or full-word mode that lacks a move_insn
3287 pattern. Note that you will get better code if you define such
3288 patterns, even if they must turn into multiple assembler instructions. */
3291 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3298 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3300 /* If X is a push on the stack, do the push now and replace
3301 X with a reference to the stack pointer. */
3302 if (push_operand (x, mode))
3303 x = emit_move_resolve_push (mode, x);
3305 /* If we are in reload, see if either operand is a MEM whose address
3306 is scheduled for replacement. */
3307 if (reload_in_progress && MEM_P (x)
3308 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3309 x = replace_equiv_address_nv (x, inner);
3310 if (reload_in_progress && MEM_P (y)
3311 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3312 y = replace_equiv_address_nv (y, inner);
3316 need_clobber = false;
3318 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3321 rtx xpart = operand_subword (x, i, 1, mode);
3324 /* Do not generate code for a move if it would come entirely
3325 from the undefined bits of a paradoxical subreg. */
3326 if (undefined_operand_subword_p (y, i))
3329 ypart = operand_subword (y, i, 1, mode);
3331 /* If we can't get a part of Y, put Y into memory if it is a
3332 constant. Otherwise, force it into a register. Then we must
3333 be able to get a part of Y. */
3334 if (ypart == 0 && CONSTANT_P (y))
3336 y = use_anchored_address (force_const_mem (mode, y));
3337 ypart = operand_subword (y, i, 1, mode);
3339 else if (ypart == 0)
3340 ypart = operand_subword_force (y, i, mode);
3342 gcc_assert (xpart && ypart);
3344 need_clobber |= (GET_CODE (xpart) == SUBREG);
3346 last_insn = emit_move_insn (xpart, ypart);
3352 /* Show the output dies here. This is necessary for SUBREGs
3353 of pseudos since we cannot track their lifetimes correctly;
3354 hard regs shouldn't appear here except as return values.
3355 We never want to emit such a clobber after reload. */
3357 && ! (reload_in_progress || reload_completed)
3358 && need_clobber != 0)
3366 /* Low level part of emit_move_insn.
3367 Called just like emit_move_insn, but assumes X and Y
3368 are basically valid. */
3371 emit_move_insn_1 (rtx x, rtx y)
3373 enum machine_mode mode = GET_MODE (x);
3374 enum insn_code code;
3376 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3378 code = optab_handler (mov_optab, mode)->insn_code;
3379 if (code != CODE_FOR_nothing)
3380 return emit_insn (GEN_FCN (code) (x, y));
3382 /* Expand complex moves by moving real part and imag part. */
3383 if (COMPLEX_MODE_P (mode))
3384 return emit_move_complex (mode, x, y);
3386 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3387 || ALL_FIXED_POINT_MODE_P (mode))
3389 rtx result = emit_move_via_integer (mode, x, y, true);
3391 /* If we can't find an integer mode, use multi words. */
3395 return emit_move_multi_word (mode, x, y);
3398 if (GET_MODE_CLASS (mode) == MODE_CC)
3399 return emit_move_ccmode (mode, x, y);
3401 /* Try using a move pattern for the corresponding integer mode. This is
3402 only safe when simplify_subreg can convert MODE constants into integer
3403 constants. At present, it can only do this reliably if the value
3404 fits within a HOST_WIDE_INT. */
3405 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3407 rtx ret = emit_move_via_integer (mode, x, y, false);
3412 return emit_move_multi_word (mode, x, y);
3415 /* Generate code to copy Y into X.
3416 Both Y and X must have the same mode, except that
3417 Y can be a constant with VOIDmode.
3418 This mode cannot be BLKmode; use emit_block_move for that.
3420 Return the last instruction emitted. */
3423 emit_move_insn (rtx x, rtx y)
3425 enum machine_mode mode = GET_MODE (x);
3426 rtx y_cst = NULL_RTX;
3429 gcc_assert (mode != BLKmode
3430 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3435 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3436 && (last_insn = compress_float_constant (x, y)))
3441 if (!LEGITIMATE_CONSTANT_P (y))
3443 y = force_const_mem (mode, y);
3445 /* If the target's cannot_force_const_mem prevented the spill,
3446 assume that the target's move expanders will also take care
3447 of the non-legitimate constant. */
3451 y = use_anchored_address (y);
3455 /* If X or Y are memory references, verify that their addresses are valid
3458 && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3460 && ! push_operand (x, GET_MODE (x))))
3461 x = validize_mem (x);
3464 && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3465 MEM_ADDR_SPACE (y)))
3466 y = validize_mem (y);
3468 gcc_assert (mode != BLKmode);
3470 last_insn = emit_move_insn_1 (x, y);
3472 if (y_cst && REG_P (x)
3473 && (set = single_set (last_insn)) != NULL_RTX
3474 && SET_DEST (set) == x
3475 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3476 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3481 /* If Y is representable exactly in a narrower mode, and the target can
3482 perform the extension directly from constant or memory, then emit the
3483 move as an extension. */
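/* E.g., the DFmode constant 1.0 is exactly representable in SFmode, so
it can be loaded as an SFmode constant and extended, which is often
cheaper than loading a DFmode constant from the constant pool. */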
3486 compress_float_constant (rtx x, rtx y)
3488 enum machine_mode dstmode = GET_MODE (x);
3489 enum machine_mode orig_srcmode = GET_MODE (y);
3490 enum machine_mode srcmode;
3492 int oldcost, newcost;
3493 bool speed = optimize_insn_for_speed_p ();
3495 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3497 if (LEGITIMATE_CONSTANT_P (y))
3498 oldcost = rtx_cost (y, SET, speed);
3500 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3502 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3503 srcmode != orig_srcmode;
3504 srcmode = GET_MODE_WIDER_MODE (srcmode))
3507 rtx trunc_y, last_insn;
3509 /* Skip if the target can't extend this way. */
3510 ic = can_extend_p (dstmode, srcmode, 0);
3511 if (ic == CODE_FOR_nothing)
3514 /* Skip if the narrowed value isn't exact. */
3515 if (! exact_real_truncate (srcmode, &r))
3518 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3520 if (LEGITIMATE_CONSTANT_P (trunc_y))
3522 /* Skip if the target needs extra instructions to perform
3524 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3526 /* This is valid, but may not be cheaper than the original. */
3527 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3528 if (oldcost < newcost)
3531 else if (float_extend_from_mem[dstmode][srcmode])
3533 trunc_y = force_const_mem (srcmode, trunc_y);
3534 /* This is valid, but may not be cheaper than the original. */
3535 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3536 if (oldcost < newcost)
3538 trunc_y = validize_mem (trunc_y);
3543 /* For CSE's benefit, force the compressed constant pool entry
3544 into a new pseudo. This constant may be used in different modes,
3545 and if not, combine will put things back together for us. */
3546 trunc_y = force_reg (srcmode, trunc_y);
3547 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3548 last_insn = get_last_insn ();
3551 set_unique_reg_note (last_insn, REG_EQUAL, y);
3559 /* Pushing data onto the stack. */
3561 /* Push a block of length SIZE (perhaps variable)
3562 and return an rtx to address the beginning of the block.
3563 The value may be virtual_outgoing_args_rtx.
3565 EXTRA is the number of bytes of padding to push in addition to SIZE.
3566 BELOW nonzero means this padding comes at low addresses;
3567 otherwise, the padding comes at high addresses. */
3570 push_block (rtx size, int extra, int below)
3574 size = convert_modes (Pmode, ptr_mode, size, 1);
3575 if (CONSTANT_P (size))
3576 anti_adjust_stack (plus_constant (size, extra));
3577 else if (REG_P (size) && extra == 0)
3578 anti_adjust_stack (size);
3581 temp = copy_to_mode_reg (Pmode, size);
3583 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3584 temp, 0, OPTAB_LIB_WIDEN);
3585 anti_adjust_stack (temp);
3588 #ifndef STACK_GROWS_DOWNWARD
3594 temp = virtual_outgoing_args_rtx;
3595 if (extra != 0 && below)
3596 temp = plus_constant (temp, extra);
3600 if (CONST_INT_P (size))
3601 temp = plus_constant (virtual_outgoing_args_rtx,
3602 -INTVAL (size) - (below ? 0 : extra));
3603 else if (extra != 0 && !below)
3604 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3605 negate_rtx (Pmode, plus_constant (size, extra)));
3607 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3608 negate_rtx (Pmode, size));
3611 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3614 #ifdef PUSH_ROUNDING
3616 /* Emit single push insn. */
3619 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3622 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3624 enum insn_code icode;
3625 insn_operand_predicate_fn pred;
3627 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3628 /* If there is a push pattern, use it. Otherwise try the old way of
3629 throwing a MEM representing the push operation to the move expander. */
3630 icode = optab_handler (push_optab, mode)->insn_code;
3631 if (icode != CODE_FOR_nothing)
3633 if (((pred = insn_data[(int) icode].operand[0].predicate)
3634 && !((*pred) (x, mode))))
3635 x = force_reg (mode, x);
3636 emit_insn (GEN_FCN (icode) (x));
3639 if (GET_MODE_SIZE (mode) == rounded_size)
3640 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3641 /* If we are to pad downward, adjust the stack pointer first and
3642 then store X into the stack location using an offset. This is
3643 because emit_move_insn does not know how to pad; it does not have
3645 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3647 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3648 HOST_WIDE_INT offset;
3650 emit_move_insn (stack_pointer_rtx,
3651 expand_binop (Pmode,
3652 #ifdef STACK_GROWS_DOWNWARD
3658 GEN_INT (rounded_size),
3659 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3661 offset = (HOST_WIDE_INT) padding_size;
3662 #ifdef STACK_GROWS_DOWNWARD
3663 if (STACK_PUSH_CODE == POST_DEC)
3664 /* We have already decremented the stack pointer, so get the
3666 offset += (HOST_WIDE_INT) rounded_size;
3668 if (STACK_PUSH_CODE == POST_INC)
3669 /* We have already incremented the stack pointer, so get the
3671 offset -= (HOST_WIDE_INT) rounded_size;
3673 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3677 #ifdef STACK_GROWS_DOWNWARD
3678 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3679 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3680 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3682 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3683 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3684 GEN_INT (rounded_size));
3686 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3689 dest = gen_rtx_MEM (mode, dest_addr);
3693 set_mem_attributes (dest, type, 1);
3695 if (flag_optimize_sibling_calls)
3696 /* Function incoming arguments may overlap with sibling call
3697 outgoing arguments and we cannot allow reordering of reads
3698 from function arguments with stores to outgoing arguments
3699 of sibling calls. */
3700 set_mem_alias_set (dest, 0);
3702 emit_move_insn (dest, x);
3706 /* Generate code to push X onto the stack, assuming it has mode MODE and
3708 MODE is redundant except when X is a CONST_INT (since they don't
3710 SIZE is an rtx for the size of data to be copied (in bytes),
3711 needed only if X is BLKmode.
3713 ALIGN (in bits) is maximum alignment we can assume.
3715 If PARTIAL and REG are both nonzero, then copy that many of the first
3716 bytes of X into registers starting with REG, and push the rest of X.
3717 The amount of space pushed is decreased by PARTIAL bytes.
3718 REG must be a hard register in this case.
3719 If REG is zero but PARTIAL is not, take all other actions for an
3720 argument partially in registers, but do not actually load any
3723 EXTRA is the amount in bytes of extra space to leave next to this arg.
3724 This is ignored if an argument block has already been allocated.
3726 On a machine that lacks real push insns, ARGS_ADDR is the address of
3727 the bottom of the argument block for this call. We use indexing off there
3728 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3729 argument block has not been preallocated.
3731 ARGS_SO_FAR is the size of args previously pushed for this call.
3733 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3734 for arguments passed in registers. If nonzero, it will be the number
3735 of bytes required. */
3738 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3739 unsigned int align, int partial, rtx reg, int extra,
3740 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3744 enum direction stack_direction
3745 #ifdef STACK_GROWS_DOWNWARD
3751 /* Decide where to pad the argument: `downward' for below,
3752 `upward' for above, or `none' for don't pad it.
3753 Default is below for small data on big-endian machines; else above. */
3754 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3756 /* Invert direction if stack is post-decrement.
3758 if (STACK_PUSH_CODE == POST_DEC)
3759 if (where_pad != none)
3760 where_pad = (where_pad == downward ? upward : downward);
3765 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3767 /* Copy a block into the stack, entirely or partially. */
3774 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3775 used = partial - offset;
3777 if (mode != BLKmode)
3779 /* A value is to be stored in an insufficiently aligned
3780 stack slot; copy via a suitably aligned slot if
3782 size = GEN_INT (GET_MODE_SIZE (mode));
3783 if (!MEM_P (xinner))
3785 temp = assign_temp (type, 0, 1, 1);
3786 emit_move_insn (temp, xinner);
3793 /* USED is now the # of bytes we need not copy to the stack
3794 because registers will take care of them. */
3797 xinner = adjust_address (xinner, BLKmode, used);
3799 /* If the partial register-part of the arg counts in its stack size,
3800 skip the part of stack space corresponding to the registers.
3801 Otherwise, start copying to the beginning of the stack space,
3802 by setting SKIP to 0. */
3803 skip = (reg_parm_stack_space == 0) ? 0 : used;
3805 #ifdef PUSH_ROUNDING
3806 /* Do it with several push insns if that doesn't take lots of insns
3807 and if there is no difficulty with push insns that skip bytes
3808 on the stack for alignment purposes. */
3811 && CONST_INT_P (size)
3813 && MEM_ALIGN (xinner) >= align
3814 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3815 /* Here we avoid the case of a structure whose weak alignment
3816 forces many pushes of a small amount of data,
3817 and such small pushes do rounding that causes trouble. */
3818 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3819 || align >= BIGGEST_ALIGNMENT
3820 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3821 == (align / BITS_PER_UNIT)))
3822 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3824 /* Push padding now if padding above and stack grows down,
3825 or if padding below and stack grows up.
3826 But if space already allocated, this has already been done. */
3827 if (extra && args_addr == 0
3828 && where_pad != none && where_pad != stack_direction)
3829 anti_adjust_stack (GEN_INT (extra));
3831 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3834 #endif /* PUSH_ROUNDING */
3838 /* Otherwise make space on the stack and copy the data
3839 to the address of that space. */
3841 /* Deduct words put into registers from the size we must copy. */
3844 if (CONST_INT_P (size))
3845 size = GEN_INT (INTVAL (size) - used);
3847 size = expand_binop (GET_MODE (size), sub_optab, size,
3848 GEN_INT (used), NULL_RTX, 0,
3852 /* Get the address of the stack space.
3853 In this case, we do not deal with EXTRA separately.
3854 A single stack adjust will do. */
3857 temp = push_block (size, extra, where_pad == downward);
3860 else if (CONST_INT_P (args_so_far))
3861 temp = memory_address (BLKmode,
3862 plus_constant (args_addr,
3863 skip + INTVAL (args_so_far)));
3865 temp = memory_address (BLKmode,
3866 plus_constant (gen_rtx_PLUS (Pmode,
3871 if (!ACCUMULATE_OUTGOING_ARGS)
3873 /* If the source is referenced relative to the stack pointer,
3874 copy it to another register to stabilize it. We do not need
3875 to do this if we know that we won't be changing sp. */
3877 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3878 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3879 temp = copy_to_reg (temp);
3882 target = gen_rtx_MEM (BLKmode, temp);
3884 /* We do *not* set_mem_attributes here, because incoming arguments
3885 may overlap with sibling call outgoing arguments and we cannot
3886 allow reordering of reads from function arguments with stores
3887 to outgoing arguments of sibling calls. We do, however, want
3888 to record the alignment of the stack slot. */
3889 /* ALIGN may well be better aligned than TYPE, e.g. due to
3890 PARM_BOUNDARY. Assume the caller isn't lying. */
3891 set_mem_align (target, align);
3893 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3896 else if (partial > 0)
3898 /* Scalar partly in registers. */
3900 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3903 /* # bytes of start of argument
3904 that we must make space for but need not store. */
3905 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3906 int args_offset = INTVAL (args_so_far);
3909 /* Push padding now if padding above and stack grows down,
3910 or if padding below and stack grows up.
3911 But if space already allocated, this has already been done. */
3912 if (extra && args_addr == 0
3913 && where_pad != none && where_pad != stack_direction)
3914 anti_adjust_stack (GEN_INT (extra));
3916 /* If we make space by pushing it, we might as well push
3917 the real data. Otherwise, we can leave OFFSET nonzero
3918 and leave the space uninitialized. */
3922 /* Now NOT_STACK gets the number of words that we don't need to
3923 allocate on the stack. Convert OFFSET to words too. */
3924 not_stack = (partial - offset) / UNITS_PER_WORD;
3925 offset /= UNITS_PER_WORD;
3927 /* If the partial register-part of the arg counts in its stack size,
3928 skip the part of stack space corresponding to the registers.
3929 Otherwise, start copying to the beginning of the stack space,
3930 by setting SKIP to 0. */
3931 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3933 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3934 x = validize_mem (force_const_mem (mode, x));
3936 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3937 SUBREGs of such registers are not allowed. */
3938 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3939 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3940 x = copy_to_reg (x);
3942 /* Loop over all the words allocated on the stack for this arg. */
3943 /* We can do it by words, because any scalar bigger than a word
3944 has a size a multiple of a word. */
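/* Iterate in the direction dictated by PUSH_ARGS_REVERSED so the
words end up on the stack in the order the calling convention
expects. */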
3945 #ifndef PUSH_ARGS_REVERSED
3946 for (i = not_stack; i < size; i++)
3948 for (i = size - 1; i >= not_stack; i--)
3950 if (i >= not_stack + offset)
3951 emit_push_insn (operand_subword_force (x, i, mode),
3952 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3954 GEN_INT (args_offset + ((i - not_stack + skip)
3956 reg_parm_stack_space, alignment_pad);
3963 /* Push padding now if padding above and stack grows down,
3964 or if padding below and stack grows up.
3965 But if space already allocated, this has already been done. */
3966 if (extra && args_addr == 0
3967 && where_pad != none && where_pad != stack_direction)
3968 anti_adjust_stack (GEN_INT (extra));
3970 #ifdef PUSH_ROUNDING
3971 if (args_addr == 0 && PUSH_ARGS)
3972 emit_single_push_insn (mode, x, type);
3976 if (CONST_INT_P (args_so_far))
3978 = memory_address (mode,
3979 plus_constant (args_addr,
3980 INTVAL (args_so_far)));
3982 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3984 dest = gen_rtx_MEM (mode, addr);
3986 /* We do *not* set_mem_attributes here, because incoming arguments
3987 may overlap with sibling call outgoing arguments and we cannot
3988 allow reordering of reads from function arguments with stores
3989 to outgoing arguments of sibling calls. We do, however, want
3990 to record the alignment of the stack slot. */
3991 /* ALIGN may well be better aligned than TYPE, e.g. due to
3992 PARM_BOUNDARY. Assume the caller isn't lying. */
3993 set_mem_align (dest, align);
3995 emit_move_insn (dest, x);
3999 /* If part should go in registers, copy that part
4000 into the appropriate registers. Do this now, at the end,
4001 since mem-to-mem copies above may do function calls. */
4002 if (partial > 0 && reg != 0)
4004 /* Handle calls that pass values in multiple non-contiguous locations.
4005 The Irix 6 ABI has examples of this. */
4006 if (GET_CODE (reg) == PARALLEL)
4007 emit_group_load (reg, x, type, -1);
4010 gcc_assert (partial % UNITS_PER_WORD == 0);
4011 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4015 if (extra && args_addr == 0 && where_pad == stack_direction)
4016 anti_adjust_stack (GEN_INT (extra));
4018 if (alignment_pad && args_addr == 0)
4019 anti_adjust_stack (alignment_pad);
4022 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4026 get_subtarget (rtx x)
4030 /* Only registers can be subtargets. */
4032 /* Don't use hard regs to avoid extending their life. */
4033 || REGNO (x) < FIRST_PSEUDO_REGISTER
4037 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4038 FIELD is a bitfield. Returns true if the optimization was successful,
4039 and there's nothing else to do. */
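/* E.g., for a bitfield update like s.b |= 1 this can emit a single IOR
on the word containing the field, instead of an extract/modify/insert
sequence. */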
4042 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4043 unsigned HOST_WIDE_INT bitpos,
4044 enum machine_mode mode1, rtx str_rtx,
4047 enum machine_mode str_mode = GET_MODE (str_rtx);
4048 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4053 if (mode1 != VOIDmode
4054 || bitsize >= BITS_PER_WORD
4055 || str_bitsize > BITS_PER_WORD
4056 || TREE_SIDE_EFFECTS (to)
4057 || TREE_THIS_VOLATILE (to))
4061 if (!BINARY_CLASS_P (src)
4062 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4065 op0 = TREE_OPERAND (src, 0);
4066 op1 = TREE_OPERAND (src, 1);
4069 if (!operand_equal_p (to, op0, 0))
4072 if (MEM_P (str_rtx))
4074 unsigned HOST_WIDE_INT offset1;
4076 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4077 str_mode = word_mode;
4078 str_mode = get_best_mode (bitsize, bitpos,
4079 MEM_ALIGN (str_rtx), str_mode, 0);
4080 if (str_mode == VOIDmode)
4082 str_bitsize = GET_MODE_BITSIZE (str_mode);
4085 bitpos %= str_bitsize;
4086 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4087 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4089 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4092 /* If the bit field covers the whole REG/MEM, store_field
4093 will likely generate better code. */
4094 if (bitsize >= str_bitsize)
4097 /* We can't handle fields split across multiple entities. */
4098 if (bitpos + bitsize > str_bitsize)
4101 if (BYTES_BIG_ENDIAN)
4102 bitpos = str_bitsize - bitpos - bitsize;
4104 switch (TREE_CODE (src))
4108 /* For now, just optimize the case of the topmost bitfield
4109 where we don't need to do any masking and also
4110 1-bit bitfields where xor can be used.
4111 We might win by one instruction for the other bitfields
4112 too if insv/extv instructions aren't used, so that
4113 can be added later. */
4114 if (bitpos + bitsize != str_bitsize
4115 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4118 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4119 value = convert_modes (str_mode,
4120 TYPE_MODE (TREE_TYPE (op1)), value,
4121 TYPE_UNSIGNED (TREE_TYPE (op1)));
4123 /* We may be accessing data outside the field, which means
4124 we can alias adjacent data. */
4125 if (MEM_P (str_rtx))
4127 str_rtx = shallow_copy_rtx (str_rtx);
4128 set_mem_alias_set (str_rtx, 0);
4129 set_mem_expr (str_rtx, 0);
4132 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4133 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4135 value = expand_and (str_mode, value, const1_rtx, NULL);
4138 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4139 build_int_cst (NULL_TREE, bitpos),
4141 result = expand_binop (str_mode, binop, str_rtx,
4142 value, str_rtx, 1, OPTAB_WIDEN);
4143 if (result != str_rtx)
4144 emit_move_insn (str_rtx, result);
4149 if (TREE_CODE (op1) != INTEGER_CST)
4151 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4152 value = convert_modes (GET_MODE (str_rtx),
4153 TYPE_MODE (TREE_TYPE (op1)), value,
4154 TYPE_UNSIGNED (TREE_TYPE (op1)));
4156 /* We may be accessing data outside the field, which means
4157 we can alias adjacent data. */
4158 if (MEM_P (str_rtx))
4160 str_rtx = shallow_copy_rtx (str_rtx);
4161 set_mem_alias_set (str_rtx, 0);
4162 set_mem_expr (str_rtx, 0);
4165 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4166 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4168 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4170 value = expand_and (GET_MODE (str_rtx), value, mask,
4173 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4174 build_int_cst (NULL_TREE, bitpos),
4176 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4177 value, str_rtx, 1, OPTAB_WIDEN);
4178 if (result != str_rtx)
4179 emit_move_insn (str_rtx, result);
4190 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4191 is true, try generating a nontemporal store. */
4194 expand_assignment (tree to, tree from, bool nontemporal)
4199 /* Don't crash if the lhs of the assignment was erroneous. */
4200 if (TREE_CODE (to) == ERROR_MARK)
4202 result = expand_normal (from);
4206 /* Optimize away no-op moves without side-effects. */
4207 if (operand_equal_p (to, from, 0))
4210 /* Assignment of a structure component needs special treatment
4211 if the structure component's rtx is not simply a MEM.
4212 Assignment of an array element at a constant index, and assignment of
4213 an array element in an unaligned packed structure field, has the same
4214 problem. */
4215 if (handled_component_p (to)
4216 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4218 enum machine_mode mode1;
4219 HOST_WIDE_INT bitsize, bitpos;
4226 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4227 &unsignedp, &volatilep, true);
4229 /* If we are going to use store_bit_field and extract_bit_field,
4230 make sure to_rtx will be safe for multiple use. */
4232 to_rtx = expand_normal (tem);
4234 /* If the bitfield is volatile, we want to access it in the
4235 field's mode, not the computed mode. */
4237 && GET_CODE (to_rtx) == MEM
4238 && flag_strict_volatile_bitfields > 0)
4239 to_rtx = adjust_address (to_rtx, mode1, 0);
4243 enum machine_mode address_mode;
4246 if (!MEM_P (to_rtx))
4248 /* We can get constant negative offsets into arrays with broken
4249 user code. Translate this to a trap instead of ICEing. */
4250 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4251 expand_builtin_trap ();
4252 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4255 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4257 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4258 if (GET_MODE (offset_rtx) != address_mode)
4259 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4261 /* A constant address in TO_RTX can have VOIDmode; we must not
4262 try to call force_reg in that case, so avoid it. */
4264 && GET_MODE (to_rtx) == BLKmode
4265 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4267 && (bitpos % bitsize) == 0
4268 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4269 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4271 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4275 to_rtx = offset_address (to_rtx, offset_rtx,
4276 highest_pow2_factor_for_target (to,
4280 /* No action is needed if the target is not a memory and the field
4281 lies completely outside that target. This can occur if the source
4282 code contains an out-of-bounds access to a small array. */
4284 && GET_MODE (to_rtx) != BLKmode
4285 && (unsigned HOST_WIDE_INT) bitpos
4286 >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4288 expand_normal (from);
4291 /* Handle expand_expr of a complex value returning a CONCAT. */
4292 else if (GET_CODE (to_rtx) == CONCAT)
4294 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4296 gcc_assert (bitpos == 0);
4297 result = store_expr (from, to_rtx, false, nontemporal);
4301 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4302 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4310 /* If the field is at offset zero, we could have been given the
4311 DECL_RTX of the parent struct. Don't munge it. */
4312 to_rtx = shallow_copy_rtx (to_rtx);
4314 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4316 /* Deal with volatile and readonly fields. The former is only
4317 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4319 MEM_VOLATILE_P (to_rtx) = 1;
4320 if (component_uses_parent_alias_set (to))
4321 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4324 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4328 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4329 TREE_TYPE (tem), get_alias_set (to),
4334 preserve_temp_slots (result);
4340 else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
4342 addr_space_t as = ADDR_SPACE_GENERIC;
4343 enum machine_mode mode, op_mode1;
4344 enum insn_code icode;
4345 rtx reg, addr, mem, insn;
4347 if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0))))
4348 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4350 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4351 reg = force_not_mem (reg);
4353 mode = TYPE_MODE (TREE_TYPE (to));
4354 addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
4356 addr = memory_address_addr_space (mode, addr, as);
4357 mem = gen_rtx_MEM (mode, addr);
4359 set_mem_attributes (mem, to, 0);
4360 set_mem_addr_space (mem, as);
4362 icode = movmisalign_optab->handlers[mode].insn_code;
4363 gcc_assert (icode != CODE_FOR_nothing);
4365 op_mode1 = insn_data[icode].operand[1].mode;
4366 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4367 && op_mode1 != VOIDmode)
4368 reg = copy_to_mode_reg (op_mode1, reg);
4370 insn = GEN_FCN (icode) (mem, reg);
4375 /* If the rhs is a function call and its value is not an aggregate,
4376 call the function before we start to compute the lhs.
4377 This is needed for correct code for cases such as
4378 val = setjmp (buf) on machines where reference to val
4379 requires loading up part of an address in a separate insn.
4381 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4382 since it might be a promoted variable where the zero- or sign-extension
4383 needs to be done. Handling this in the normal way is safe because no
4384 computation is done before the call. The same is true for SSA names. */
4385 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4386 && COMPLETE_TYPE_P (TREE_TYPE (from))
4387 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4388 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4389 && REG_P (DECL_RTL (to)))
4390 || TREE_CODE (to) == SSA_NAME))
4395 value = expand_normal (from);
4397 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4399 /* Handle calls that return values in multiple non-contiguous locations.
4400 The Irix 6 ABI has examples of this. */
4401 if (GET_CODE (to_rtx) == PARALLEL)
4402 emit_group_load (to_rtx, value, TREE_TYPE (from),
4403 int_size_in_bytes (TREE_TYPE (from)));
4404 else if (GET_MODE (to_rtx) == BLKmode)
4405 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4408 if (POINTER_TYPE_P (TREE_TYPE (to)))
4409 value = convert_memory_address_addr_space
4410 (GET_MODE (to_rtx), value,
4411 TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4413 emit_move_insn (to_rtx, value);
4415 preserve_temp_slots (to_rtx);
4421 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4422 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4425 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4427 /* Don't move directly into a return register. */
4428 if (TREE_CODE (to) == RESULT_DECL
4429 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4434 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4436 if (GET_CODE (to_rtx) == PARALLEL)
4437 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4438 int_size_in_bytes (TREE_TYPE (from)));
4440 emit_move_insn (to_rtx, temp);
4442 preserve_temp_slots (to_rtx);
4448 /* In case we are returning the contents of an object which overlaps
4449 the place the value is being stored, use a safe function when copying
4450 a value through a pointer into a structure value return block. */
4451 if (TREE_CODE (to) == RESULT_DECL
4452 && TREE_CODE (from) == INDIRECT_REF
4453 && ADDR_SPACE_GENERIC_P
4454 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4455 && refs_may_alias_p (to, from)
4456 && cfun->returns_struct
4457 && !cfun->returns_pcc_struct)
4462 size = expr_size (from);
4463 from_rtx = expand_normal (from);
4465 emit_library_call (memmove_libfunc, LCT_NORMAL,
4466 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4467 XEXP (from_rtx, 0), Pmode,
4468 convert_to_mode (TYPE_MODE (sizetype),
4469 size, TYPE_UNSIGNED (sizetype)),
4470 TYPE_MODE (sizetype));
4472 preserve_temp_slots (to_rtx);
4478 /* Compute FROM and store the value in the rtx we got. */
4481 result = store_expr (from, to_rtx, 0, nontemporal);
4482 preserve_temp_slots (result);
4488 /* Emits nontemporal store insn that moves FROM to TO. Returns true if this
4489 succeeded, false otherwise. */
4492 emit_storent_insn (rtx to, rtx from)
4494 enum machine_mode mode = GET_MODE (to), imode;
4495 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4498 if (code == CODE_FOR_nothing)
4501 imode = insn_data[code].operand[0].mode;
4502 if (!insn_data[code].operand[0].predicate (to, imode))
4505 imode = insn_data[code].operand[1].mode;
4506 if (!insn_data[code].operand[1].predicate (from, imode))
4508 from = copy_to_mode_reg (imode, from);
4509 if (!insn_data[code].operand[1].predicate (from, imode))
4513 pattern = GEN_FCN (code) (to, from);
4514 if (pattern == NULL_RTX)
4517 emit_insn (pattern);
4521 /* Generate code for computing expression EXP,
4522 and storing the value into TARGET.
4524 If the mode is BLKmode then we may return TARGET itself.
4525 It turns out that in BLKmode it doesn't cause a problem,
4526 because C has no operators that could combine two different
4527 assignments into the same BLKmode object with different values
4528 with no sequence point. Will other languages need this to
4529 be more thorough?
4531 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4532 stack, and block moves may need to be treated specially.
4534 If NONTEMPORAL is true, try using a nontemporal store instruction. */
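/* A minimal usage sketch, mirroring what expand_assignment does in
   the ordinary case above (TO_RTX must already be a valid REG or MEM):

     rtx to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
     store_expr (from, to_rtx, 0, false);  */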
4537 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4540 rtx alt_rtl = NULL_RTX;
4541 location_t loc = EXPR_LOCATION (exp);
4543 if (VOID_TYPE_P (TREE_TYPE (exp)))
4545 /* C++ can generate ?: expressions with a throw expression in one
4546 branch and an rvalue in the other. Here, we resolve attempts to
4547 store the throw expression's nonexistent result. */
4548 gcc_assert (!call_param_p);
4549 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4552 if (TREE_CODE (exp) == COMPOUND_EXPR)
4554 /* Perform first part of compound expression, then assign from second
4555 part. */
4556 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4557 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4558 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4561 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4563 /* For conditional expression, get safe form of the target. Then
4564 test the condition, doing the appropriate assignment on either
4565 side. This avoids the creation of unnecessary temporaries.
4566 For non-BLKmode, it is more efficient not to do this. */
4568 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4570 do_pending_stack_adjust ();
4572 jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4573 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4575 emit_jump_insn (gen_jump (lab2));
4578 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4585 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4586 /* If this is a scalar in a register that is stored in a wider mode
4587 than the declared mode, compute the result into its declared mode
4588 and then convert to the wider mode. Our value is the computed
4589 expression. */
4591 rtx inner_target = 0;
4593 /* We can do the conversion inside EXP, which will often result
4594 in some optimizations. Do the conversion in two steps: first
4595 change the signedness, if needed, then the extend. But don't
4596 do this if the type of EXP is a subtype of something else
4597 since then the conversion might involve more than just
4598 converting modes. */
4599 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4600 && TREE_TYPE (TREE_TYPE (exp)) == 0
4601 && GET_MODE_PRECISION (GET_MODE (target))
4602 == TYPE_PRECISION (TREE_TYPE (exp)))
4604 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4605 != SUBREG_PROMOTED_UNSIGNED_P (target))
4607 /* Some types, e.g. Fortran's logical*4, won't have a signed
4608 version, so use the mode instead. */
4610 = (signed_or_unsigned_type_for
4611 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4613 ntype = lang_hooks.types.type_for_mode
4614 (TYPE_MODE (TREE_TYPE (exp)),
4615 SUBREG_PROMOTED_UNSIGNED_P (target));
4617 exp = fold_convert_loc (loc, ntype, exp);
4620 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4621 (GET_MODE (SUBREG_REG (target)),
4622 SUBREG_PROMOTED_UNSIGNED_P (target)),
4625 inner_target = SUBREG_REG (target);
4628 temp = expand_expr (exp, inner_target, VOIDmode,
4629 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4631 /* If TEMP is a VOIDmode constant, use convert_modes to make
4632 sure that we properly convert it. */
4633 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4635 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4636 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4637 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4638 GET_MODE (target), temp,
4639 SUBREG_PROMOTED_UNSIGNED_P (target));
4642 convert_move (SUBREG_REG (target), temp,
4643 SUBREG_PROMOTED_UNSIGNED_P (target));
4647 else if (TREE_CODE (exp) == STRING_CST
4648 && !nontemporal && !call_param_p
4649 && TREE_STRING_LENGTH (exp) > 0
4650 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4652 /* Optimize initialization of an array with a STRING_CST. */
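/* An illustrative example (not from the original sources): for

     char buf[16] = "abc";

   the bytes of the STRING_CST are stored by pieces (the copy length
   may be rounded up to a STORE_MAX_PIECES boundary), and the tail of
   the array is zeroed by the clear_storage call below. */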
4653 HOST_WIDE_INT exp_len, str_copy_len;
4656 exp_len = int_expr_size (exp);
4660 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4661 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4664 str_copy_len = TREE_STRING_LENGTH (exp);
4665 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4667 str_copy_len += STORE_MAX_PIECES - 1;
4668 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4670 str_copy_len = MIN (str_copy_len, exp_len);
4671 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4672 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4673 MEM_ALIGN (target), false))
4678 dest_mem = store_by_pieces (dest_mem,
4679 str_copy_len, builtin_strncpy_read_str,
4680 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4681 MEM_ALIGN (target), false,
4682 exp_len > str_copy_len ? 1 : 0);
4683 if (exp_len > str_copy_len)
4684 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4685 GEN_INT (exp_len - str_copy_len),
4694 /* If we want to use a nontemporal store, force the value to
4695 register first. */
4696 tmp_target = nontemporal ? NULL_RTX : target;
4697 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4699 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4703 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4704 the same as that of TARGET, adjust the constant. This is needed, for
4705 example, in case it is a CONST_DOUBLE and we want only a word-sized
4706 value. */
4707 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4708 && TREE_CODE (exp) != ERROR_MARK
4709 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4710 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4711 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4713 /* If value was not generated in the target, store it there.
4714 Convert the value to TARGET's type first if necessary and emit the
4715 pending incrementations that have been queued when expanding EXP.
4716 Note that we cannot emit the whole queue blindly because this will
4717 effectively disable the POST_INC optimization later.
4719 If TEMP and TARGET compare equal according to rtx_equal_p, but
4720 one or both of them are volatile memory refs, we have to distinguish
4721 two cases:
4722 - expand_expr has used TARGET. In this case, we must not generate
4723 another copy. This can be detected by TARGET being equal according
4724 to == .
4725 - expand_expr has not used TARGET - that means that the source just
4726 happens to have the same RTX form. Since temp will have been created
4727 by expand_expr, it will compare unequal according to == .
4728 We must generate a copy in this case, to reach the correct number
4729 of volatile memory references. */
4731 if ((! rtx_equal_p (temp, target)
4732 || (temp != target && (side_effects_p (temp)
4733 || side_effects_p (target))))
4734 && TREE_CODE (exp) != ERROR_MARK
4735 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4736 but TARGET is not a valid memory reference, TEMP will differ
4737 from TARGET although it is really the same location. */
4738 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4739 /* If there's nothing to copy, don't bother. Don't call
4740 expr_size unless necessary, because some front ends' (C++)
4741 expr_size hook must not be given objects that are not
4742 supposed to be bit-copied or bit-initialized. */
4743 && expr_size (exp) != const0_rtx)
4745 if (GET_MODE (temp) != GET_MODE (target)
4746 && GET_MODE (temp) != VOIDmode)
4748 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4749 if (GET_MODE (target) == BLKmode
4750 || GET_MODE (temp) == BLKmode)
4751 emit_block_move (target, temp, expr_size (exp),
4753 ? BLOCK_OP_CALL_PARM
4754 : BLOCK_OP_NORMAL));
4756 convert_move (target, temp, unsignedp);
4759 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4761 /* Handle copying a string constant into an array. The string
4762 constant may be shorter than the array. So copy just the string's
4763 actual length, and clear the rest. First get the size of the data
4764 type of the string, which is actually the size of the target. */
4765 rtx size = expr_size (exp);
4767 if (CONST_INT_P (size)
4768 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4769 emit_block_move (target, temp, size,
4771 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4774 enum machine_mode pointer_mode
4775 = targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4776 enum machine_mode address_mode
4777 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4779 /* Compute the size of the data to copy from the string. */
4781 = size_binop_loc (loc, MIN_EXPR,
4782 make_tree (sizetype, size),
4783 size_int (TREE_STRING_LENGTH (exp)));
4785 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4787 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4790 /* Copy that much. */
4791 copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4792 TYPE_UNSIGNED (sizetype));
4793 emit_block_move (target, temp, copy_size_rtx,
4795 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4797 /* Figure out how much is left in TARGET that we have to clear.
4798 Do all calculations in pointer_mode. */
4799 if (CONST_INT_P (copy_size_rtx))
4801 size = plus_constant (size, -INTVAL (copy_size_rtx));
4802 target = adjust_address (target, BLKmode,
4803 INTVAL (copy_size_rtx));
4807 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4808 copy_size_rtx, NULL_RTX, 0,
4811 if (GET_MODE (copy_size_rtx) != address_mode)
4812 copy_size_rtx = convert_to_mode (address_mode,
4814 TYPE_UNSIGNED (sizetype));
4816 target = offset_address (target, copy_size_rtx,
4817 highest_pow2_factor (copy_size));
4818 label = gen_label_rtx ();
4819 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4820 GET_MODE (size), 0, label);
4823 if (size != const0_rtx)
4824 clear_storage (target, size, BLOCK_OP_NORMAL);
4830 /* Handle calls that return values in multiple non-contiguous locations.
4831 The Irix 6 ABI has examples of this. */
4832 else if (GET_CODE (target) == PARALLEL)
4833 emit_group_load (target, temp, TREE_TYPE (exp),
4834 int_size_in_bytes (TREE_TYPE (exp)));
4835 else if (GET_MODE (temp) == BLKmode)
4836 emit_block_move (target, temp, expr_size (exp),
4838 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4839 else if (nontemporal
4840 && emit_storent_insn (target, temp))
4841 /* If we managed to emit a nontemporal store, there is nothing else to
4842 do. */
4846 temp = force_operand (temp, target);
4848 emit_move_insn (target, temp);
4855 /* Helper for categorize_ctor_elements. Identical interface. */
4858 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4859 HOST_WIDE_INT *p_elt_count,
4862 unsigned HOST_WIDE_INT idx;
4863 HOST_WIDE_INT nz_elts, elt_count;
4864 tree value, purpose;
4866 /* Whether CTOR is a valid constant initializer, in accordance with what
4867 initializer_constant_valid_p does. If inferred from the constructor
4868 elements, true until proven otherwise. */
4869 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4870 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4875 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4877 HOST_WIDE_INT mult = 1;
4879 if (TREE_CODE (purpose) == RANGE_EXPR)
4881 tree lo_index = TREE_OPERAND (purpose, 0);
4882 tree hi_index = TREE_OPERAND (purpose, 1);
4884 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4885 mult = (tree_low_cst (hi_index, 1)
4886 - tree_low_cst (lo_index, 1) + 1);
4889 switch (TREE_CODE (value))
4893 HOST_WIDE_INT nz = 0, ic = 0;
4896 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4898 nz_elts += mult * nz;
4899 elt_count += mult * ic;
4901 if (const_from_elts_p && const_p)
4902 const_p = const_elt_p;
4909 if (!initializer_zerop (value))
4915 nz_elts += mult * TREE_STRING_LENGTH (value);
4916 elt_count += mult * TREE_STRING_LENGTH (value);
4920 if (!initializer_zerop (TREE_REALPART (value)))
4922 if (!initializer_zerop (TREE_IMAGPART (value)))
4930 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4932 if (!initializer_zerop (TREE_VALUE (v)))
4941 HOST_WIDE_INT tc = count_type_elements (TREE_TYPE (value), true);
4944 nz_elts += mult * tc;
4945 elt_count += mult * tc;
4947 if (const_from_elts_p && const_p)
4948 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4956 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4957 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4960 bool clear_this = true;
4962 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4964 /* We don't expect more than one element of the union to be
4965 initialized. Not sure what we should do otherwise... */
4966 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4969 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4970 CONSTRUCTOR_ELTS (ctor),
4973 /* ??? We could look at each element of the union and find the
4974 largest element, which would avoid comparing the size of the
4975 initialized element against any tail padding in the union.
4976 Doesn't seem worth the effort... */
4977 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4978 TYPE_SIZE (init_sub_type)) == 1)
4980 /* And now we have to find out if the element itself is fully
4981 constructed. E.g. for union { struct { int a, b; } s; } u
4982 = { .s = { .a = 1 } }. */
4983 if (elt_count == count_type_elements (init_sub_type, false))
4988 *p_must_clear = clear_this;
4991 *p_nz_elts += nz_elts;
4992 *p_elt_count += elt_count;
4997 /* Examine CTOR to discover:
4998 * how many scalar fields are set to nonzero values,
4999 and place it in *P_NZ_ELTS;
5000 * how many scalar fields in total are in CTOR,
5001 and place it in *P_ELT_COUNT;
5002 * if a type is a union, and the initializer from the constructor
5003 is not the largest element in the union, then set *P_MUST_CLEAR.
5005 Return whether or not CTOR is a valid static constant initializer, the same
5006 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
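/* An illustrative example (not from the original sources): for

     int a[8] = { 1, 0, 2 };

   *P_NZ_ELTS ends up as 2 (two nonzero scalars) and *P_ELT_COUNT as
   3 (three scalars appear in the constructor), even though the array
   type itself contains 8 scalars. */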
5009 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5010 HOST_WIDE_INT *p_elt_count,
5015 *p_must_clear = false;
5018 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5021 /* Count the number of scalars in TYPE. Return -1 on overflow or
5022 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count
5023 a flexible array member at the end of the structure. */
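/* An illustrative example (not from the original sources):

     struct S { int a; int b[3]; };

   counts as 4 scalars: 1 for the field A plus 3 for the array B (the
   ARRAY_TYPE case below multiplies the element type's count by the
   number of elements). */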
5026 count_type_elements (const_tree type, bool allow_flexarr)
5028 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5029 switch (TREE_CODE (type))
5033 tree telts = array_type_nelts (type);
5034 if (telts && host_integerp (telts, 1))
5036 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5037 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5040 else if (max / n > m)
5048 HOST_WIDE_INT n = 0, t;
5051 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5052 if (TREE_CODE (f) == FIELD_DECL)
5054 t = count_type_elements (TREE_TYPE (f), false);
5057 /* Check for structures with flexible array member. */
5058 tree tf = TREE_TYPE (f);
5060 && TREE_CHAIN (f) == NULL
5061 && TREE_CODE (tf) == ARRAY_TYPE
5063 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5064 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5065 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5066 && int_size_in_bytes (type) >= 0)
5078 case QUAL_UNION_TYPE:
5085 return TYPE_VECTOR_SUBPARTS (type);
5089 case FIXED_POINT_TYPE:
5094 case REFERENCE_TYPE:
5109 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
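/* E.g. (illustrative): "int a[8] = { 1, 0, 2 };" has 2 nonzero
   scalars against 8 in the type, and 2 < 8/4 fails, so this returns
   0; for "int a[12] = { 1, 0, 2 };" we get 2 < 12/4 and it returns
   1. */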
5112 mostly_zeros_p (const_tree exp)
5114 if (TREE_CODE (exp) == CONSTRUCTOR)
5117 HOST_WIDE_INT nz_elts, count, elts;
5120 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5124 elts = count_type_elements (TREE_TYPE (exp), false);
5126 return nz_elts < elts / 4;
5129 return initializer_zerop (exp);
5132 /* Return 1 if EXP contains all zeros. */
5135 all_zeros_p (const_tree exp)
5137 if (TREE_CODE (exp) == CONSTRUCTOR)
5140 HOST_WIDE_INT nz_elts, count;
5143 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5144 return nz_elts == 0;
5147 return initializer_zerop (exp);
5150 /* Helper function for store_constructor.
5151 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5152 TYPE is the type of the CONSTRUCTOR, not the element type.
5153 CLEARED is as for store_constructor.
5154 ALIAS_SET is the alias set to use for any stores.
5156 This provides a recursive shortcut back to store_constructor when it isn't
5157 necessary to go through store_field. This is so that we can pass through
5158 the cleared field to let store_constructor know that we may not have to
5159 clear a substructure if the outer structure has already been cleared. */
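/* E.g. (an illustrative sketch, hypothetical types): for

     struct T { struct S { int a, b; } s; int c; } t = { { 1 }, 2 };

   the inner { 1 } is itself a CONSTRUCTOR stored at a byte boundary,
   so we recurse into store_constructor, passing CLEARED through so
   that, when the outer object has already been cleared, the zero
   field B need not be stored again. */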
5162 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5163 HOST_WIDE_INT bitpos, enum machine_mode mode,
5164 tree exp, tree type, int cleared,
5165 alias_set_type alias_set)
5167 if (TREE_CODE (exp) == CONSTRUCTOR
5168 /* We can only call store_constructor recursively if the size and
5169 bit position are on a byte boundary. */
5170 && bitpos % BITS_PER_UNIT == 0
5171 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5172 /* If we have a nonzero bitpos for a register target, then we just
5173 let store_field do the bitfield handling. This is unlikely to
5174 generate unnecessary clear instructions anyway. */
5175 && (bitpos == 0 || MEM_P (target)))
5179 = adjust_address (target,
5180 GET_MODE (target) == BLKmode
5182 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5183 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5186 /* Update the alias set, if required. */
5187 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5188 && MEM_ALIAS_SET (target) != 0)
5190 target = copy_rtx (target);
5191 set_mem_alias_set (target, alias_set);
5194 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5197 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5200 /* Store the value of constructor EXP into the rtx TARGET.
5201 TARGET is either a REG or a MEM; we know it cannot conflict, since
5202 safe_from_p has been called.
5203 CLEARED is true if TARGET is known to have been zero'd.
5204 SIZE is the number of bytes of TARGET we are allowed to modify: this
5205 may not be the same as the size of EXP if we are assigning to a field
5206 which has been packed to exclude padding bits. */
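/* An illustrative example (not from the original sources): for

     struct S { int a, b, c; } s = { 1 };

   the constructor has fewer fields than the structure, so the RECORD
   case below clears the whole object first and then stores only the
   field A. */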
5209 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5211 tree type = TREE_TYPE (exp);
5212 #ifdef WORD_REGISTER_OPERATIONS
5213 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5216 switch (TREE_CODE (type))
5220 case QUAL_UNION_TYPE:
5222 unsigned HOST_WIDE_INT idx;
5225 /* If size is zero or the target is already cleared, do nothing. */
5226 if (size == 0 || cleared)
5228 /* We either clear the aggregate or indicate the value is dead. */
5229 else if ((TREE_CODE (type) == UNION_TYPE
5230 || TREE_CODE (type) == QUAL_UNION_TYPE)
5231 && ! CONSTRUCTOR_ELTS (exp))
5232 /* If the constructor is empty, clear the union. */
5234 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5238 /* If we are building a static constructor into a register,
5239 set the initial value as zero so we can fold the value into
5240 a constant. But if more than one register is involved,
5241 this probably loses. */
5242 else if (REG_P (target) && TREE_STATIC (exp)
5243 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5245 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5249 /* If the constructor has fewer fields than the structure or
5250 if we are initializing the structure to mostly zeros, clear
5251 the whole structure first. Don't do this if TARGET is a
5252 register whose mode size isn't equal to SIZE since
5253 clear_storage can't handle this case. */
5255 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5256 != fields_length (type))
5257 || mostly_zeros_p (exp))
5259 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5262 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5266 if (REG_P (target) && !cleared)
5267 emit_clobber (target);
5269 /* Store each element of the constructor into the
5270 corresponding field of TARGET. */
5271 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5273 enum machine_mode mode;
5274 HOST_WIDE_INT bitsize;
5275 HOST_WIDE_INT bitpos = 0;
5277 rtx to_rtx = target;
5279 /* Just ignore missing fields. We cleared the whole
5280 structure, above, if any fields are missing. */
5284 if (cleared && initializer_zerop (value))
5287 if (host_integerp (DECL_SIZE (field), 1))
5288 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5292 mode = DECL_MODE (field);
5293 if (DECL_BIT_FIELD (field))
5296 offset = DECL_FIELD_OFFSET (field);
5297 if (host_integerp (offset, 0)
5298 && host_integerp (bit_position (field), 0))
5300 bitpos = int_bit_position (field);
5304 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5308 enum machine_mode address_mode;
5312 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5313 make_tree (TREE_TYPE (exp),
5316 offset_rtx = expand_normal (offset);
5317 gcc_assert (MEM_P (to_rtx));
5320 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5321 if (GET_MODE (offset_rtx) != address_mode)
5322 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5324 to_rtx = offset_address (to_rtx, offset_rtx,
5325 highest_pow2_factor (offset));
5328 #ifdef WORD_REGISTER_OPERATIONS
5329 /* If this initializes a field that is smaller than a
5330 word, at the start of a word, try to widen it to a full
5331 word. This special case allows us to output C++ member
5332 function initializations in a form that the optimizers
5333 can understand. */
5334 if (REG_P (target)
5335 && bitsize < BITS_PER_WORD
5336 && bitpos % BITS_PER_WORD == 0
5337 && GET_MODE_CLASS (mode) == MODE_INT
5338 && TREE_CODE (value) == INTEGER_CST
5340 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5342 tree type = TREE_TYPE (value);
5344 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5346 type = lang_hooks.types.type_for_size
5347 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5348 value = fold_convert (type, value);
5351 if (BYTES_BIG_ENDIAN)
5353 = fold_build2 (LSHIFT_EXPR, type, value,
5354 build_int_cst (type,
5355 BITS_PER_WORD - bitsize));
5356 bitsize = BITS_PER_WORD;
5361 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5362 && DECL_NONADDRESSABLE_P (field))
5364 to_rtx = copy_rtx (to_rtx);
5365 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5368 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5369 value, type, cleared,
5370 get_alias_set (TREE_TYPE (field)));
5377 unsigned HOST_WIDE_INT i;
5380 tree elttype = TREE_TYPE (type);
5382 HOST_WIDE_INT minelt = 0;
5383 HOST_WIDE_INT maxelt = 0;
5385 domain = TYPE_DOMAIN (type);
5386 const_bounds_p = (TYPE_MIN_VALUE (domain)
5387 && TYPE_MAX_VALUE (domain)
5388 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5389 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5391 /* If we have constant bounds for the range of the type, get them. */
5394 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5395 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5398 /* If the constructor has fewer elements than the array, clear
5399 the whole array first. Similarly if this is a static
5400 constructor of a non-BLKmode object. */
5403 else if (REG_P (target) && TREE_STATIC (exp))
5407 unsigned HOST_WIDE_INT idx;
5409 HOST_WIDE_INT count = 0, zero_count = 0;
5410 need_to_clear = ! const_bounds_p;
5412 /* This loop is a more accurate version of the loop in
5413 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5414 is also needed to check for missing elements. */
5415 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5417 HOST_WIDE_INT this_node_count;
5422 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5424 tree lo_index = TREE_OPERAND (index, 0);
5425 tree hi_index = TREE_OPERAND (index, 1);
5427 if (! host_integerp (lo_index, 1)
5428 || ! host_integerp (hi_index, 1))
5434 this_node_count = (tree_low_cst (hi_index, 1)
5435 - tree_low_cst (lo_index, 1) + 1);
5438 this_node_count = 1;
5440 count += this_node_count;
5441 if (mostly_zeros_p (value))
5442 zero_count += this_node_count;
5445 /* Clear the entire array first if there are any missing
5446 elements, or if the incidence of zero elements is >=
5447 75%. */
5448 if (! need_to_clear
5449 && (count < maxelt - minelt + 1
5450 || 4 * zero_count >= 3 * count))
5454 if (need_to_clear && size > 0)
5457 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5459 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5463 if (!cleared && REG_P (target))
5464 /* Inform later passes that the old value is dead. */
5465 emit_clobber (target);
5467 /* Store each element of the constructor into the
5468 corresponding element of TARGET, determined by counting the
5470 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5472 enum machine_mode mode;
5473 HOST_WIDE_INT bitsize;
5474 HOST_WIDE_INT bitpos;
5475 rtx xtarget = target;
5477 if (cleared && initializer_zerop (value))
5480 mode = TYPE_MODE (elttype);
5481 if (mode == BLKmode)
5482 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5483 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5486 bitsize = GET_MODE_BITSIZE (mode);
5488 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5490 tree lo_index = TREE_OPERAND (index, 0);
5491 tree hi_index = TREE_OPERAND (index, 1);
5492 rtx index_r, pos_rtx;
5493 HOST_WIDE_INT lo, hi, count;
5496 /* If the range is constant and "small", unroll the loop. */
5498 && host_integerp (lo_index, 0)
5499 && host_integerp (hi_index, 0)
5500 && (lo = tree_low_cst (lo_index, 0),
5501 hi = tree_low_cst (hi_index, 0),
5502 count = hi - lo + 1,
5505 || (host_integerp (TYPE_SIZE (elttype), 1)
5506 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5509 lo -= minelt; hi -= minelt;
5510 for (; lo <= hi; lo++)
5512 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5515 && !MEM_KEEP_ALIAS_SET_P (target)
5516 && TREE_CODE (type) == ARRAY_TYPE
5517 && TYPE_NONALIASED_COMPONENT (type))
5519 target = copy_rtx (target);
5520 MEM_KEEP_ALIAS_SET_P (target) = 1;
5523 store_constructor_field
5524 (target, bitsize, bitpos, mode, value, type, cleared,
5525 get_alias_set (elttype));
5530 rtx loop_start = gen_label_rtx ();
5531 rtx loop_end = gen_label_rtx ();
5534 expand_normal (hi_index);
5536 index = build_decl (EXPR_LOCATION (exp),
5537 VAR_DECL, NULL_TREE, domain);
5538 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5539 SET_DECL_RTL (index, index_r);
5540 store_expr (lo_index, index_r, 0, false);
5542 /* Build the head of the loop. */
5543 do_pending_stack_adjust ();
5544 emit_label (loop_start);
5546 /* Assign value to element index. */
5548 fold_convert (ssizetype,
5549 fold_build2 (MINUS_EXPR,
5552 TYPE_MIN_VALUE (domain)));
5555 size_binop (MULT_EXPR, position,
5556 fold_convert (ssizetype,
5557 TYPE_SIZE_UNIT (elttype)));
5559 pos_rtx = expand_normal (position);
5560 xtarget = offset_address (target, pos_rtx,
5561 highest_pow2_factor (position));
5562 xtarget = adjust_address (xtarget, mode, 0);
5563 if (TREE_CODE (value) == CONSTRUCTOR)
5564 store_constructor (value, xtarget, cleared,
5565 bitsize / BITS_PER_UNIT);
5567 store_expr (value, xtarget, 0, false);
5569 /* Generate a conditional jump to exit the loop. */
5570 exit_cond = build2 (LT_EXPR, integer_type_node,
5572 jumpif (exit_cond, loop_end, -1);
5574 /* Update the loop counter, and jump to the head of
5576 expand_assignment (index,
5577 build2 (PLUS_EXPR, TREE_TYPE (index),
5578 index, integer_one_node),
5581 emit_jump (loop_start);
5583 /* Build the end of the loop. */
5584 emit_label (loop_end);
5587 else if ((index != 0 && ! host_integerp (index, 0))
5588 || ! host_integerp (TYPE_SIZE (elttype), 1))
5593 index = ssize_int (1);
5596 index = fold_convert (ssizetype,
5597 fold_build2 (MINUS_EXPR,
5600 TYPE_MIN_VALUE (domain)));
5603 size_binop (MULT_EXPR, index,
5604 fold_convert (ssizetype,
5605 TYPE_SIZE_UNIT (elttype)));
5606 xtarget = offset_address (target,
5607 expand_normal (position),
5608 highest_pow2_factor (position));
5609 xtarget = adjust_address (xtarget, mode, 0);
5610 store_expr (value, xtarget, 0, false);
5615 bitpos = ((tree_low_cst (index, 0) - minelt)
5616 * tree_low_cst (TYPE_SIZE (elttype), 1));
5618 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5620 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5621 && TREE_CODE (type) == ARRAY_TYPE
5622 && TYPE_NONALIASED_COMPONENT (type))
5624 target = copy_rtx (target);
5625 MEM_KEEP_ALIAS_SET_P (target) = 1;
5627 store_constructor_field (target, bitsize, bitpos, mode, value,
5628 type, cleared, get_alias_set (elttype));
5636 unsigned HOST_WIDE_INT idx;
5637 constructor_elt *ce;
5641 tree elttype = TREE_TYPE (type);
5642 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5643 enum machine_mode eltmode = TYPE_MODE (elttype);
5644 HOST_WIDE_INT bitsize;
5645 HOST_WIDE_INT bitpos;
5646 rtvec vector = NULL;
5648 alias_set_type alias;
5650 gcc_assert (eltmode != BLKmode);
5652 n_elts = TYPE_VECTOR_SUBPARTS (type);
5653 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5655 enum machine_mode mode = GET_MODE (target);
5657 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5658 if (icode != CODE_FOR_nothing)
5662 vector = rtvec_alloc (n_elts);
5663 for (i = 0; i < n_elts; i++)
5664 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5668 /* If the constructor has fewer elements than the vector,
5669 clear the whole vector first. Similarly if this is a static
5670 constructor of a non-BLKmode object. */
5673 else if (REG_P (target) && TREE_STATIC (exp))
5677 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5680 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5682 int n_elts_here = tree_low_cst
5683 (int_const_binop (TRUNC_DIV_EXPR,
5684 TYPE_SIZE (TREE_TYPE (value)),
5685 TYPE_SIZE (elttype), 0), 1);
5687 count += n_elts_here;
5688 if (mostly_zeros_p (value))
5689 zero_count += n_elts_here;
5692 /* Clear the entire vector first if there are any missing elements,
5693 or if the incidence of zero elements is >= 75%. */
5694 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5697 if (need_to_clear && size > 0 && !vector)
5700 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5702 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5706 /* Inform later passes that the old value is dead. */
5707 if (!cleared && !vector && REG_P (target))
5708 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5711 alias = MEM_ALIAS_SET (target);
5713 alias = get_alias_set (elttype);
5715 /* Store each element of the constructor into the corresponding
5716 element of TARGET, determined by counting the elements. */
5717 for (idx = 0, i = 0;
5718 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5719 idx++, i += bitsize / elt_size)
5721 HOST_WIDE_INT eltpos;
5722 tree value = ce->value;
5724 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5725 if (cleared && initializer_zerop (value))
5729 eltpos = tree_low_cst (ce->index, 1);
5735 /* Vector CONSTRUCTORs should only be built from smaller
5736 vectors in the case of BLKmode vectors. */
5737 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5738 RTVEC_ELT (vector, eltpos)
5739 = expand_normal (value);
5743 enum machine_mode value_mode =
5744 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5745 ? TYPE_MODE (TREE_TYPE (value))
5747 bitpos = eltpos * elt_size;
5748 store_constructor_field (target, bitsize, bitpos,
5749 value_mode, value, type,
5755 emit_insn (GEN_FCN (icode)
5757 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5766 /* Store the value of EXP (an expression tree)
5767 into a subfield of TARGET which has mode MODE and occupies
5768 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5769 If MODE is VOIDmode, it means that we are storing into a bit-field.
5771 Always return const0_rtx unless we have something particular to
5772 return.
5774 TYPE is the type of the underlying object,
5776 ALIAS_SET is the alias set for the destination. This value will
5777 (in general) be different from that for TARGET, since TARGET is a
5778 reference to the containing structure.
5780 If NONTEMPORAL is true, try generating a nontemporal store. */
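/* A minimal usage sketch, mirroring the call made from
   expand_assignment above once get_inner_reference (see below) has
   decomposed the lhs:

     store_field (to_rtx, bitsize, bitpos, mode1, from,
                  TREE_TYPE (tem), get_alias_set (to), nontemporal);  */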
5783 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5784 enum machine_mode mode, tree exp, tree type,
5785 alias_set_type alias_set, bool nontemporal)
5787 if (TREE_CODE (exp) == ERROR_MARK)
5790 /* If we have nothing to store, do nothing unless the expression has
5791 side-effects. */
5792 if (bitsize == 0)
5793 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5795 /* If we are storing into an unaligned field of an aligned union that is
5796 in a register, we may have the mode of TARGET being an integer mode but
5797 MODE == BLKmode. In that case, get an aligned object whose size and
5798 alignment are the same as TARGET and store TARGET into it (we can avoid
5799 the store if the field being stored is the entire width of TARGET). Then
5800 call ourselves recursively to store the field into a BLKmode version of
5801 that object. Finally, load from the object into TARGET. This is not
5802 very efficient in general, but should only be slightly more expensive
5803 than the otherwise-required unaligned accesses. Perhaps this can be
5804 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5805 twice, once with emit_move_insn and once via store_field. */
5808 && (REG_P (target) || GET_CODE (target) == SUBREG))
5810 rtx object = assign_temp (type, 0, 1, 1);
5811 rtx blk_object = adjust_address (object, BLKmode, 0);
5813 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5814 emit_move_insn (object, target);
5816 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5819 emit_move_insn (target, object);
5821 /* We want to return the BLKmode version of the data. */
5825 if (GET_CODE (target) == CONCAT)
5827 /* We're storing into a struct containing a single __complex. */
5829 gcc_assert (!bitpos);
5830 return store_expr (exp, target, 0, nontemporal);
5833 /* If the structure is in a register or if the component
5834 is a bit field, we cannot use addressing to access it.
5835 Use bit-field techniques or SUBREG to store in it. */
5837 if (mode == VOIDmode
5838 || (mode != BLKmode && ! direct_store[(int) mode]
5839 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5840 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5842 || GET_CODE (target) == SUBREG
5843 /* If the field isn't aligned enough to store as an ordinary memref,
5844 store it as a bit field. */
5846 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5847 || bitpos % GET_MODE_ALIGNMENT (mode))
5848 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5849 || (bitpos % BITS_PER_UNIT != 0)))
5850 /* If the RHS and field are a constant size and the size of the
5851 RHS isn't the same size as the bitfield, we must use bitfield
5852 techniques. */
5853 || (bitsize >= 0
5854 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5855 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5860 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5861 implies a mask operation. If the precision is the same size as
5862 the field we're storing into, that mask is redundant. This is
5863 particularly common with bit field assignments generated by the
5864 C++ front end. */
5865 nop_def = get_def_for_expr (exp, NOP_EXPR);
5868 tree type = TREE_TYPE (exp);
5869 if (INTEGRAL_TYPE_P (type)
5870 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5871 && bitsize == TYPE_PRECISION (type))
5873 tree op = gimple_assign_rhs1 (nop_def);
5874 type = TREE_TYPE (op);
5875 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5880 temp = expand_normal (exp);
5882 /* If BITSIZE is narrower than the size of the type of EXP
5883 we will be narrowing TEMP. Normally, what's wanted are the
5884 low-order bits. However, if EXP's type is a record and this is a
5885 big-endian machine, we want the upper BITSIZE bits. */
5886 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5887 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5888 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5889 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5890 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5894 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5895 MODE. */
5896 if (mode != VOIDmode && mode != BLKmode
5897 && mode != TYPE_MODE (TREE_TYPE (exp)))
5898 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5900 /* If the modes of TEMP and TARGET are both BLKmode, both
5901 must be in memory and BITPOS must be aligned on a byte
5902 boundary. If so, we simply do a block copy. Likewise
5903 for a BLKmode-like TARGET. */
5904 if (GET_MODE (temp) == BLKmode
5905 && (GET_MODE (target) == BLKmode
5907 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5908 && (bitpos % BITS_PER_UNIT) == 0
5909 && (bitsize % BITS_PER_UNIT) == 0)))
5911 gcc_assert (MEM_P (target) && MEM_P (temp)
5912 && (bitpos % BITS_PER_UNIT) == 0);
5914 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5915 emit_block_move (target, temp,
5916 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5923 /* Store the value in the bitfield. */
5924 store_bit_field (target, bitsize, bitpos, mode, temp);
5930 /* Now build a reference to just the desired component. */
5931 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5933 if (to_rtx == target)
5934 to_rtx = copy_rtx (to_rtx);
5936 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5937 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5938 set_mem_alias_set (to_rtx, alias_set);
5940 return store_expr (exp, to_rtx, 0, nontemporal);
5944 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5945 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5946 codes and find the ultimate containing object, which we return.
5948 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5949 bit position, and *PUNSIGNEDP to the signedness of the field.
5950 If the position of the field is variable, we store a tree
5951 giving the variable offset (in units) in *POFFSET.
5952 This offset is in addition to the bit position.
5953 If the position is not variable, we store 0 in *POFFSET.
5955 If any of the extraction expressions is volatile,
5956 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5958 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5959 Otherwise, it is a mode that can be used to access the field.
5961 If the field describes a variable-sized object, *PMODE is set to
5962 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5963 this case, but the address of the object can be found.
5965 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5966 look through nodes that serve as markers of a greater alignment than
5967 the one that can be deduced from the expression. These nodes make it
5968 possible for front-ends to prevent temporaries from being created by
5969 the middle-end on alignment considerations. For that purpose, the
5970 normal operating mode at high-level is to always pass FALSE so that
5971 the ultimate containing object is really returned; moreover, the
5972 associated predicate handled_component_p will always return TRUE
5973 on these nodes, thus indicating that they are essentially handled
5974 by get_inner_reference. TRUE should only be passed when the caller
5975 is scanning the expression in order to build another representation
5976 and specifically knows how to handle these nodes; as such, this is
5977 the normal operating mode in the RTL expanders. */
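/* An illustrative example (hypothetical type, assuming 32-bit int
   and 8-bit char): applied to the tree for "s.c[2]" where

     struct S { int x; char c[8]; } s;

   this returns the VAR_DECL for S, with *PBITSIZE == 8, *PBITPOS ==
   32 + 2*8 == 48, *POFFSET == 0, and *PMODE the mode of char (QImode
   on typical targets). */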
5980 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5981 HOST_WIDE_INT *pbitpos, tree *poffset,
5982 enum machine_mode *pmode, int *punsignedp,
5983 int *pvolatilep, bool keep_aligning)
5986 enum machine_mode mode = VOIDmode;
5987 bool blkmode_bitfield = false;
5988 tree offset = size_zero_node;
5989 double_int bit_offset = double_int_zero;
5991 /* First get the mode, signedness, and size. We do this from just the
5992 outermost expression. */
5994 if (TREE_CODE (exp) == COMPONENT_REF)
5996 tree field = TREE_OPERAND (exp, 1);
5997 size_tree = DECL_SIZE (field);
5998 if (!DECL_BIT_FIELD (field))
5999 mode = DECL_MODE (field);
6000 else if (DECL_MODE (field) == BLKmode)
6001 blkmode_bitfield = true;
6002 else if (TREE_THIS_VOLATILE (exp)
6003 && flag_strict_volatile_bitfields > 0)
6004 /* Volatile bitfields should be accessed in the mode of the
6005 field's type, not the mode computed based on the bit
6006 position. */
6007 mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6009 *punsignedp = DECL_UNSIGNED (field);
6011 else if (TREE_CODE (exp) == BIT_FIELD_REF)
6013 size_tree = TREE_OPERAND (exp, 1);
6014 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6015 || TYPE_UNSIGNED (TREE_TYPE (exp)));
6017 /* For vector types, with the correct size of access, use the mode of
6018 the inner type. */
6019 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6020 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6021 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6022 mode = TYPE_MODE (TREE_TYPE (exp));
6026 mode = TYPE_MODE (TREE_TYPE (exp));
6027 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6029 if (mode == BLKmode)
6030 size_tree = TYPE_SIZE (TREE_TYPE (exp));
6032 *pbitsize = GET_MODE_BITSIZE (mode);
6037 if (! host_integerp (size_tree, 1))
6038 mode = BLKmode, *pbitsize = -1;
6040 *pbitsize = tree_low_cst (size_tree, 1);
6043 /* Compute cumulative bit-offset for nested component-refs and array-refs,
6044 and find the ultimate containing object. */
6047 switch (TREE_CODE (exp))
6051 = double_int_add (bit_offset,
6052 tree_to_double_int (TREE_OPERAND (exp, 2)));
6057 tree field = TREE_OPERAND (exp, 1);
6058 tree this_offset = component_ref_field_offset (exp);
6060 /* If this field hasn't been filled in yet, don't go past it.
6061 This should only happen when folding expressions made during
6062 type construction. */
6063 if (this_offset == 0)
6066 offset = size_binop (PLUS_EXPR, offset, this_offset);
6067 bit_offset = double_int_add (bit_offset,
6069 (DECL_FIELD_BIT_OFFSET (field)));
6071 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6076 case ARRAY_RANGE_REF:
6078 tree index = TREE_OPERAND (exp, 1);
6079 tree low_bound = array_ref_low_bound (exp);
6080 tree unit_size = array_ref_element_size (exp);
6082 /* We assume all arrays have sizes that are a multiple of a byte.
6083 First subtract the lower bound, if any, in the type of the
6084 index, then convert to sizetype and multiply by the size of
6085 the array element. */
6086 if (! integer_zerop (low_bound))
6087 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6090 offset = size_binop (PLUS_EXPR, offset,
6091 size_binop (MULT_EXPR,
6092 fold_convert (sizetype, index),
6101 bit_offset = double_int_add (bit_offset,
6102 uhwi_to_double_int (*pbitsize));
6105 case VIEW_CONVERT_EXPR:
6106 if (keep_aligning && STRICT_ALIGNMENT
6107 && (TYPE_ALIGN (TREE_TYPE (exp))
6108 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6109 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6110 < BIGGEST_ALIGNMENT)
6111 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6112 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6120 /* If any reference in the chain is volatile, the effect is volatile. */
6121 if (TREE_THIS_VOLATILE (exp))
6124 exp = TREE_OPERAND (exp, 0);
6128 /* If OFFSET is constant, see if we can return the whole thing as a
6129 constant bit position. Make sure to handle overflow during
6130 this conversion. */
6131 if (host_integerp (offset, 0))
6133 double_int tem = double_int_lshift (tree_to_double_int (offset),
6135 ? 3 : exact_log2 (BITS_PER_UNIT),
6136 HOST_BITS_PER_DOUBLE_INT, true);
6137 tem = double_int_add (tem, bit_offset);
6138 if (double_int_fits_in_shwi_p (tem))
6140 *pbitpos = double_int_to_shwi (tem);
6141 *poffset = offset = NULL_TREE;
6145 /* Otherwise, split it up. */
6148 *pbitpos = double_int_to_shwi (bit_offset);
6152 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6153 if (mode == VOIDmode
6155 && (*pbitpos % BITS_PER_UNIT) == 0
6156 && (*pbitsize % BITS_PER_UNIT) == 0)
6164 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6165 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6166 EXP is marked as PACKED. */
6169 contains_packed_reference (const_tree exp)
6171 bool packed_p = false;
6175 switch (TREE_CODE (exp))
6179 tree field = TREE_OPERAND (exp, 1);
6180 packed_p = DECL_PACKED (field)
6181 || TYPE_PACKED (TREE_TYPE (field))
6182 || TYPE_PACKED (TREE_TYPE (exp));
6190 case ARRAY_RANGE_REF:
6193 case VIEW_CONVERT_EXPR:
6199 exp = TREE_OPERAND (exp, 0);
6205 /* Return a tree of sizetype representing the size, in bytes, of the element
6206 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6209 array_ref_element_size (tree exp)
6211 tree aligned_size = TREE_OPERAND (exp, 3);
6212 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6213 location_t loc = EXPR_LOCATION (exp);
6215 /* If a size was specified in the ARRAY_REF, it's the size measured
6216 in alignment units of the element type. So multiply by that value. */
6219 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6220 sizetype from another type of the same width and signedness. */
6221 if (TREE_TYPE (aligned_size) != sizetype)
6222 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6223 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6224 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6227 /* Otherwise, take the size from that of the element type. Substitute
6228 any PLACEHOLDER_EXPR that we have. */
6230 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6233 /* Return a tree representing the lower bound of the array mentioned in
6234 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6237 array_ref_low_bound (tree exp)
6239 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6241 /* If a lower bound is specified in EXP, use it. */
6242 if (TREE_OPERAND (exp, 2))
6243 return TREE_OPERAND (exp, 2);
6245 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6246 substituting for a PLACEHOLDER_EXPR as needed. */
6247 if (domain_type && TYPE_MIN_VALUE (domain_type))
6248 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6250 /* Otherwise, return a zero of the appropriate type. */
6251 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
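/* Illustrative sketch, not part of the original sources: the helpers
   above combine the way the ARRAY_REF handling of get_inner_reference
   uses them, i.e. the byte offset of an element is
   (index - low_bound) * element_size computed in sizetype.  The helper
   name below is hypothetical.  */
#if 0
static tree
example_array_ref_byte_offset (tree exp)
{
  tree index = TREE_OPERAND (exp, 1);
  tree low_bound = array_ref_low_bound (exp);
  tree unit_size = array_ref_element_size (exp);

  /* Subtract the lower bound in the type of the index, then convert
     to sizetype and scale by the element size.  */
  if (! integer_zerop (low_bound))
    index = fold_build2 (MINUS_EXPR, TREE_TYPE (index), index, low_bound);
  return size_binop (MULT_EXPR, fold_convert (sizetype, index), unit_size);
}
#endif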
6254 /* Return a tree representing the upper bound of the array mentioned in
6255 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6258 array_ref_up_bound (tree exp)
6260 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6262 /* If there is a domain type and it has an upper bound, use it, substituting
6263 for a PLACEHOLDER_EXPR as needed. */
6264 if (domain_type && TYPE_MAX_VALUE (domain_type))
6265 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6267 /* Otherwise fail. */
6271 /* Return a tree representing the offset, in bytes, of the field referenced
6272 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6275 component_ref_field_offset (tree exp)
6277 tree aligned_offset = TREE_OPERAND (exp, 2);
6278 tree field = TREE_OPERAND (exp, 1);
6279 location_t loc = EXPR_LOCATION (exp);
6281 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6282 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6286 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6287 sizetype from another type of the same width and signedness. */
6288 if (TREE_TYPE (aligned_offset) != sizetype)
6289 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6290 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6291 size_int (DECL_OFFSET_ALIGN (field)
6295 /* Otherwise, take the offset from that of the field. Substitute
6296 any PLACEHOLDER_EXPR that we have. */
6298 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6301 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6303 static unsigned HOST_WIDE_INT
6304 target_align (const_tree target)
6306 /* We might have a chain of nested references with intermediate misaligning
6307 bit-field components, so we need to recurse to find out. */
6309 unsigned HOST_WIDE_INT this_align, outer_align;
6311 switch (TREE_CODE (target))
6317 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6318 outer_align = target_align (TREE_OPERAND (target, 0));
6319 return MIN (this_align, outer_align);
6322 case ARRAY_RANGE_REF:
6323 this_align = TYPE_ALIGN (TREE_TYPE (target));
6324 outer_align = target_align (TREE_OPERAND (target, 0));
6325 return MIN (this_align, outer_align);
6328 case NON_LVALUE_EXPR:
6329 case VIEW_CONVERT_EXPR:
6330 this_align = TYPE_ALIGN (TREE_TYPE (target));
6331 outer_align = target_align (TREE_OPERAND (target, 0));
6332 return MAX (this_align, outer_align);
6335 return TYPE_ALIGN (TREE_TYPE (target));
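/* Illustrative example, not from the original sources: for a target
   such as s.f[i] where the field "f" sits in a packed structure, the
   ARRAY_REF case takes the MIN of the element type's alignment and
   the COMPONENT_REF's result, and the COMPONENT_REF case in turn caps
   that at DECL_ALIGN of "f"; so a 1-byte-aligned field makes the
   whole reference 1-byte-aligned no matter how aligned the element
   type itself is.  */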
6340 /* Given an rtx VALUE that may contain additions and multiplications, return
6341 an equivalent value that just refers to a register, memory, or constant.
6342 This is done by generating instructions to perform the arithmetic and
6343 returning a pseudo-register containing the value.
6345 The returned value may be a REG, SUBREG, MEM or constant. */
6348 force_operand (rtx value, rtx target)
6351 /* Use subtarget as the target for operand 0 of a binary operation. */
6352 rtx subtarget = get_subtarget (target);
6353 enum rtx_code code = GET_CODE (value);
6355 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6357 && !REG_P (SUBREG_REG (value))
6358 && !MEM_P (SUBREG_REG (value)))
6361 = simplify_gen_subreg (GET_MODE (value),
6362 force_reg (GET_MODE (SUBREG_REG (value)),
6363 force_operand (SUBREG_REG (value),
6365 GET_MODE (SUBREG_REG (value)),
6366 SUBREG_BYTE (value));
6367 code = GET_CODE (value);
6370 /* Check for a PIC address load. */
6371 if ((code == PLUS || code == MINUS)
6372 && XEXP (value, 0) == pic_offset_table_rtx
6373 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6374 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6375 || GET_CODE (XEXP (value, 1)) == CONST))
6378 subtarget = gen_reg_rtx (GET_MODE (value));
6379 emit_move_insn (subtarget, value);
6383 if (ARITHMETIC_P (value))
6385 op2 = XEXP (value, 1);
6386 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6388 if (code == MINUS && CONST_INT_P (op2))
6391 op2 = negate_rtx (GET_MODE (value), op2);
6394 /* Check for an addition with OP2 a constant integer and our first
6395 operand a PLUS of a virtual register and something else. In that
6396 case, we want to emit the sum of the virtual register and the
6397 constant first and then add the other value. This allows virtual
6398 register instantiation to simply modify the constant rather than
6399 creating another one around this addition. */
6400 if (code == PLUS && CONST_INT_P (op2)
6401 && GET_CODE (XEXP (value, 0)) == PLUS
6402 && REG_P (XEXP (XEXP (value, 0), 0))
6403 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6404 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6406 rtx temp = expand_simple_binop (GET_MODE (value), code,
6407 XEXP (XEXP (value, 0), 0), op2,
6408 subtarget, 0, OPTAB_LIB_WIDEN);
6409 return expand_simple_binop (GET_MODE (value), code, temp,
6410 force_operand (XEXP (XEXP (value,
6412 target, 0, OPTAB_LIB_WIDEN);
6415 op1 = force_operand (XEXP (value, 0), subtarget);
6416 op2 = force_operand (op2, NULL_RTX);
6420 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6422 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6423 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6424 target, 1, OPTAB_LIB_WIDEN);
6426 return expand_divmod (0,
6427 FLOAT_MODE_P (GET_MODE (value))
6428 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6429 GET_MODE (value), op1, op2, target, 0);
6431 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6434 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6437 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6440 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6441 target, 0, OPTAB_LIB_WIDEN);
6443 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6444 target, 1, OPTAB_LIB_WIDEN);
6447 if (UNARY_P (value))
6450 target = gen_reg_rtx (GET_MODE (value));
6451 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6458 case FLOAT_TRUNCATE:
6459 convert_move (target, op1, code == ZERO_EXTEND);
6464 expand_fix (target, op1, code == UNSIGNED_FIX);
6468 case UNSIGNED_FLOAT:
6469 expand_float (target, op1, code == UNSIGNED_FLOAT);
6473 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6477 #ifdef INSN_SCHEDULING
6478 /* On machines that have insn scheduling, we want all memory references to be
6479 explicit, so we need to deal with such paradoxical SUBREGs. */
6480 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6481 && (GET_MODE_SIZE (GET_MODE (value))
6482 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6484 = simplify_gen_subreg (GET_MODE (value),
6485 force_reg (GET_MODE (SUBREG_REG (value)),
6486 force_operand (SUBREG_REG (value),
6488 GET_MODE (SUBREG_REG (value)),
6489 SUBREG_BYTE (value));
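/* Illustrative sketch, not part of the original sources: a typical use
   of force_operand is to flatten an address computation such as
   (plus (mult X 4) Y) into a pseudo holding the final sum, emitting
   the arithmetic insns as a side effect.  The helper name below is
   hypothetical.  */
#if 0
static rtx
example_flatten_address (rtx x, rtx y)
{
  rtx addr = gen_rtx_PLUS (Pmode,
			   gen_rtx_MULT (Pmode, x, GEN_INT (4)),
			   y);
  /* The result refers only to a register, memory, or constant.  */
  return force_operand (addr, NULL_RTX);
}
#endif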
6495 /* Subroutine of expand_expr: return nonzero iff there is no way that
6496 EXP can reference X, which is being modified. TOP_P is nonzero if this
6497 call is going to be used to determine whether we need a temporary
6498 for EXP, as opposed to a recursive call to this function.
6500 It is always safe for this routine to return zero since it merely
6501 searches for optimization opportunities. */
6504 safe_from_p (const_rtx x, tree exp, int top_p)
6510 /* If EXP has varying size, we MUST use a target since we currently
6511 have no way of allocating temporaries of variable size
6512 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6513 So we assume here that something at a higher level has prevented a
6514 clash. This is somewhat bogus, but the best we can do. Only
6515 do this when X is BLKmode and when we are at the top level. */
6516 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6517 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6518 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6519 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6520 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6522 && GET_MODE (x) == BLKmode)
6523 /* If X is in the outgoing argument area, it is always safe. */
6525 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6526 || (GET_CODE (XEXP (x, 0)) == PLUS
6527 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6530 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6531 find the underlying pseudo. */
6532 if (GET_CODE (x) == SUBREG)
6535 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6539 /* Now look at our tree code and possibly recurse. */
6540 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6542 case tcc_declaration:
6543 exp_rtl = DECL_RTL_IF_SET (exp);
6549 case tcc_exceptional:
6550 if (TREE_CODE (exp) == TREE_LIST)
6554 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6556 exp = TREE_CHAIN (exp);
6559 if (TREE_CODE (exp) != TREE_LIST)
6560 return safe_from_p (x, exp, 0);
6563 else if (TREE_CODE (exp) == CONSTRUCTOR)
6565 constructor_elt *ce;
6566 unsigned HOST_WIDE_INT idx;
6569 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6571 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6572 || !safe_from_p (x, ce->value, 0))
6576 else if (TREE_CODE (exp) == ERROR_MARK)
6577 return 1; /* An already-visited SAVE_EXPR? */
6582 /* The only case we look at here is the DECL_INITIAL inside a
6584 return (TREE_CODE (exp) != DECL_EXPR
6585 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6586 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6587 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6590 case tcc_comparison:
6591 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6596 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6598 case tcc_expression:
6601 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6602 the expression. If it is set, we conflict iff we are that rtx or
6603 both are in memory. Otherwise, we check all operands of the
6604 expression recursively. */
6606 switch (TREE_CODE (exp))
6609 /* If the operand is static or we are static, we can't conflict.
6610 Likewise if we don't conflict with the operand at all. */
6611 if (staticp (TREE_OPERAND (exp, 0))
6612 || TREE_STATIC (exp)
6613 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6616 /* Otherwise, the only way this can conflict is if we are taking
6617 the address of a DECL whose address is part of X, which is
6619 exp = TREE_OPERAND (exp, 0);
6622 if (!DECL_RTL_SET_P (exp)
6623 || !MEM_P (DECL_RTL (exp)))
6626 exp_rtl = XEXP (DECL_RTL (exp), 0);
6630 case MISALIGNED_INDIRECT_REF:
6631 case ALIGN_INDIRECT_REF:
6634 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6635 get_alias_set (exp)))
6640 /* Assume that the call will clobber all hard registers and
6642 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6647 case WITH_CLEANUP_EXPR:
6648 case CLEANUP_POINT_EXPR:
6649 /* Lowered by gimplify.c. */
6653 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6659 /* If we have an rtx, we do not need to scan our operands. */
6663 nops = TREE_OPERAND_LENGTH (exp);
6664 for (i = 0; i < nops; i++)
6665 if (TREE_OPERAND (exp, i) != 0
6666 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6672 /* Should never get a type here. */
6676 /* If we have an rtl, find any enclosed object. Then see if we conflict
6680 if (GET_CODE (exp_rtl) == SUBREG)
6682 exp_rtl = SUBREG_REG (exp_rtl);
6684 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6688 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6689 are memory and they conflict. */
6690 return ! (rtx_equal_p (x, exp_rtl)
6691 || (MEM_P (x) && MEM_P (exp_rtl)
6692 && true_dependence (exp_rtl, VOIDmode, x,
6693 rtx_addr_varies_p)));
6696 /* If we reach here, it is safe. */
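/* Illustrative example, not from the original sources: if X is a hard
   register (say, a register variable) or a MEM, the CALL_EXPR case
   above returns 0 because the call is assumed to clobber all hard
   registers and all of memory, whereas a pseudo register is
   considered safe from the call.  */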
6701 /* Return the highest power of two that EXP is known to be a multiple of.
6702 This is used in updating alignment of MEMs in array references. */
6704 unsigned HOST_WIDE_INT
6705 highest_pow2_factor (const_tree exp)
6707 unsigned HOST_WIDE_INT c0, c1;
6709 switch (TREE_CODE (exp))
6712 /* We can find the lowest bit that's a one. If the low
6713 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6714 We need to handle this case since we can find it in a COND_EXPR,
6715 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6716 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6718 if (TREE_OVERFLOW (exp))
6719 return BIGGEST_ALIGNMENT;
6722 /* Note: tree_low_cst is intentionally not used here;
6723 we don't care about the upper bits. */
6724 c0 = TREE_INT_CST_LOW (exp);
6726 return c0 ? c0 : BIGGEST_ALIGNMENT;
6730 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6731 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6732 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6733 return MIN (c0, c1);
6736 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6737 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6740 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6742 if (integer_pow2p (TREE_OPERAND (exp, 1))
6743 && host_integerp (TREE_OPERAND (exp, 1), 1))
6745 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6746 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6747 return MAX (1, c0 / c1);
6752 /* The highest power of two of a bit-and expression is the maximum of
6753 that of its operands. We typically get here for a complex LHS and
6754 a constant negative power of two on the RHS to force an explicit
6755 alignment, so don't bother looking at the LHS. */
6756 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6760 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6763 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6766 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6767 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6768 return MIN (c0, c1);
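/* Illustrative example, not from the original sources: for the
   INTEGER_CST 24 the function returns 8 (the lowest set bit); for
   i * 16, with nothing known about i, the MULT_EXPR case yields
   1 * 16 == 16; and for i * 16 + 24 the PLUS_EXPR case returns
   MIN (16, 8) == 8.  */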
6777 /* Similar, except that the alignment requirements of TARGET are
6778 taken into account. Assume it is at least as aligned as its
6779 type, unless it is a COMPONENT_REF in which case the layout of
6780 the structure gives the alignment. */
6782 static unsigned HOST_WIDE_INT
6783 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6785 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6786 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6788 return MAX (factor, talign);
6791 /* Return &VAR expression for emulated thread local VAR. */
6794 emutls_var_address (tree var)
6796 tree emuvar = emutls_decl (var);
6797 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6798 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6799 tree arglist = build_tree_list (NULL_TREE, arg);
6800 tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
6801 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
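/* Illustrative example, not from the original sources: for
   "__thread int v;" on a target without native TLS support, the call
   built above corresponds roughly to

     (int *) __emutls_get_address (&__emutls_v.v)

   where __emutls_v.v is the control object that emutls_decl creates
   for "v".  */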
6805 /* Subroutine of expand_expr. Expand the two operands of a binary
6806 expression EXP0 and EXP1, placing the results in OP0 and OP1.
6807 The value may be stored in TARGET if TARGET is nonzero. The
6808 MODIFIER argument is as documented by expand_expr. */
6811 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6812 enum expand_modifier modifier)
6814 if (! safe_from_p (target, exp1, 1))
6816 if (operand_equal_p (exp0, exp1, 0))
6818 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6819 *op1 = copy_rtx (*op0);
6823 /* If we need to preserve evaluation order, copy exp0 into its own
6824 temporary variable so that it can't be clobbered by exp1. */
6825 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6826 exp0 = save_expr (exp0);
6827 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6828 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
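/* Illustrative sketch, not part of the original sources: a binary
   arithmetic case typically pairs expand_operands with
   expand_simple_binop, much as the cases of expand_expr_real_2 below
   do.  The helper name is hypothetical.  */
#if 0
static rtx
example_expand_plus (tree exp0, tree exp1, rtx target,
		     enum machine_mode mode)
{
  rtx op0, op1;

  expand_operands (exp0, exp1, get_subtarget (target), &op0, &op1,
		   EXPAND_NORMAL);
  return expand_simple_binop (mode, PLUS, op0, op1, target,
			      1, OPTAB_LIB_WIDEN);
}
#endif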
6833 /* Return a MEM that contains constant EXP. DEFER is as for
6834 output_constant_def and MODIFIER is as for expand_expr. */
6837 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6841 mem = output_constant_def (exp, defer);
6842 if (modifier != EXPAND_INITIALIZER)
6843 mem = use_anchored_address (mem);
6847 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6848 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6851 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6852 enum expand_modifier modifier, addr_space_t as)
6854 rtx result, subtarget;
6856 HOST_WIDE_INT bitsize, bitpos;
6857 int volatilep, unsignedp;
6858 enum machine_mode mode1;
6860 /* If we are taking the address of a constant and are at the top level,
6861 we have to use output_constant_def since we can't call force_const_mem
6863 /* ??? This should be considered a front-end bug. We should not be
6864 generating ADDR_EXPR of something that isn't an LVALUE. The only
6865 exception here is STRING_CST. */
6866 if (CONSTANT_CLASS_P (exp))
6867 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6869 /* Everything must be something allowed by is_gimple_addressable. */
6870 switch (TREE_CODE (exp))
6873 /* This case will happen via recursion for &a->b. */
6874 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6877 /* Expand the initializer like constants above. */
6878 return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6881 /* The real part of the complex number is always first, therefore
6882 the address is the same as the address of the parent object. */
6885 inner = TREE_OPERAND (exp, 0);
6889 /* The imaginary part of the complex number is always second.
6890 The expression is therefore always offset by the size of the
6893 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6894 inner = TREE_OPERAND (exp, 0);
6898 /* TLS emulation hook - replace __thread VAR's &VAR with
6899 __emutls_get_address (&_emutls.VAR). */
6900 if (! targetm.have_tls
6901 && TREE_CODE (exp) == VAR_DECL
6902 && DECL_THREAD_LOCAL_P (exp))
6904 exp = emutls_var_address (exp);
6905 return expand_expr (exp, target, tmode, modifier);
6910 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6911 expand_expr, as that can have various side effects; LABEL_DECLs, for
6912 example, may not have their DECL_RTL set yet. Expand the rtl of
6913 CONSTRUCTORs too, which should yield a memory reference for the
6914 constructor's contents. Assume language specific tree nodes can
6915 be expanded in some interesting way. */
6916 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6918 || TREE_CODE (exp) == CONSTRUCTOR
6919 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6921 result = expand_expr (exp, target, tmode,
6922 modifier == EXPAND_INITIALIZER
6923 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6925 /* If the DECL isn't in memory, then the DECL wasn't properly
6926 marked TREE_ADDRESSABLE, which will be either a front-end
6927 or a tree optimizer bug. */
6928 gcc_assert (MEM_P (result));
6929 result = XEXP (result, 0);
6931 /* ??? Is this needed anymore? */
6932 if (DECL_P (exp) && ! TREE_USED (exp))
6934 assemble_external (exp);
6935 TREE_USED (exp) = 1;
6938 if (modifier != EXPAND_INITIALIZER
6939 && modifier != EXPAND_CONST_ADDRESS)
6940 result = force_operand (result, target);
6944 /* Pass FALSE as the last argument to get_inner_reference although
6945 we are expanding to RTL. The rationale is that we know how to
6946 handle "aligning nodes" here: we can just bypass them because
6947 they won't change the final object whose address will be returned
6948 (they actually exist only for that purpose). */
6949 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6950 &mode1, &unsignedp, &volatilep, false);
6954 /* We must have made progress. */
6955 gcc_assert (inner != exp);
6957 subtarget = offset || bitpos ? NULL_RTX : target;
6958 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6959 the inner alignment, force the inner to be sufficiently aligned. */
6960 if (CONSTANT_CLASS_P (inner)
6961 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6963 inner = copy_node (inner);
6964 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6965 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6966 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6968 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
6974 if (modifier != EXPAND_NORMAL)
6975 result = force_operand (result, NULL);
6976 tmp = expand_expr (offset, NULL_RTX, tmode,
6977 modifier == EXPAND_INITIALIZER
6978 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6980 result = convert_memory_address_addr_space (tmode, result, as);
6981 tmp = convert_memory_address_addr_space (tmode, tmp, as);
6983 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6984 result = gen_rtx_PLUS (tmode, result, tmp);
6987 subtarget = bitpos ? NULL_RTX : target;
6988 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6989 1, OPTAB_LIB_WIDEN);
6995 /* Someone beforehand should have rejected taking the address
6996 of such an object. */
6997 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6999 result = plus_constant (result, bitpos / BITS_PER_UNIT);
7000 if (modifier < EXPAND_SUM)
7001 result = force_operand (result, target);
7007 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
7008 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
7011 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7012 enum expand_modifier modifier)
7014 addr_space_t as = ADDR_SPACE_GENERIC;
7015 enum machine_mode address_mode = Pmode;
7016 enum machine_mode pointer_mode = ptr_mode;
7017 enum machine_mode rmode;
7020 /* Target mode of VOIDmode says "whatever's natural". */
7021 if (tmode == VOIDmode)
7022 tmode = TYPE_MODE (TREE_TYPE (exp));
7024 if (POINTER_TYPE_P (TREE_TYPE (exp)))
7026 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7027 address_mode = targetm.addr_space.address_mode (as);
7028 pointer_mode = targetm.addr_space.pointer_mode (as);
7031 /* We can get called with some Weird Things if the user does silliness
7032 like "(short) &a". In that case, convert_memory_address won't do
7033 the right thing, so ignore the given target mode. */
7034 if (tmode != address_mode && tmode != pointer_mode)
7035 tmode = address_mode;
7037 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7038 tmode, modifier, as);
7040 /* Despite expand_expr's claims about ignoring TMODE when not
7041 strictly convenient, things break if we don't honor it. Note
7042 that combined with the above, we only do this for pointer modes. */
7043 rmode = GET_MODE (result);
7044 if (rmode == VOIDmode)
7047 result = convert_memory_address_addr_space (tmode, result, as);
7052 /* Generate code for computing CONSTRUCTOR EXP.
7053 An rtx for the computed value is returned. If AVOID_TEMP_MEM
7054 is TRUE, then instead of creating a temporary variable in memory,
7055 NULL is returned and the caller needs to handle it differently. */
7058 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7059 bool avoid_temp_mem)
7061 tree type = TREE_TYPE (exp);
7062 enum machine_mode mode = TYPE_MODE (type);
7064 /* Try to avoid creating a temporary at all. This is possible
7065 if all of the initializer is zero.
7066 FIXME: try to handle all [0..255] initializers we can handle
7068 if (TREE_STATIC (exp)
7069 && !TREE_ADDRESSABLE (exp)
7070 && target != 0 && mode == BLKmode
7071 && all_zeros_p (exp))
7073 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7077 /* All elts simple constants => refer to a constant in memory. But
7078 if this is a non-BLKmode mode, let it store a field at a time
7079 since that should make a CONST_INT or CONST_DOUBLE when we
7080 fold. Likewise, if we have a target we can use, it is best to
7081 store directly into the target unless the type is large enough
7082 that memcpy will be used. If we are making an initializer and
7083 all operands are constant, put it in memory as well.
7085 FIXME: Avoid trying to fill vector constructors piece-meal.
7086 Output them with output_constant_def below unless we're sure
7087 they're zeros. This should go away when vector initializers
7088 are treated like VECTOR_CST instead of arrays. */
7089 if ((TREE_STATIC (exp)
7090 && ((mode == BLKmode
7091 && ! (target != 0 && safe_from_p (target, exp, 1)))
7092 || TREE_ADDRESSABLE (exp)
7093 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7094 && (! MOVE_BY_PIECES_P
7095 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7097 && ! mostly_zeros_p (exp))))
7098 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7099 && TREE_CONSTANT (exp)))
7106 constructor = expand_expr_constant (exp, 1, modifier);
7108 if (modifier != EXPAND_CONST_ADDRESS
7109 && modifier != EXPAND_INITIALIZER
7110 && modifier != EXPAND_SUM)
7111 constructor = validize_mem (constructor);
7116 /* Handle calls that pass values in multiple non-contiguous
7117 locations. The Irix 6 ABI has examples of this. */
7118 if (target == 0 || ! safe_from_p (target, exp, 1)
7119 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7125 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7126 | (TREE_READONLY (exp)
7127 * TYPE_QUAL_CONST))),
7128 0, TREE_ADDRESSABLE (exp), 1);
7131 store_constructor (exp, target, 0, int_expr_size (exp));
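/* Illustrative example, not from the original sources: for a BLKmode
   aggregate whose initializer is all zeros, the all_zeros_p path
   above reduces something like "struct S s = { 0 };" to a single
   clear_storage call instead of a field-by-field store_constructor
   walk.  */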
7136 /* expand_expr: generate code for computing expression EXP.
7137 An rtx for the computed value is returned. The value is never null.
7138 In the case of a void EXP, const0_rtx is returned.
7140 The value may be stored in TARGET if TARGET is nonzero.
7141 TARGET is just a suggestion; callers must assume that
7142 the rtx returned may not be the same as TARGET.
7144 If TARGET is CONST0_RTX, it means that the value will be ignored.
7146 If TMODE is not VOIDmode, it suggests generating the
7147 result in mode TMODE. But this is done only when convenient.
7148 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7149 TMODE is just a suggestion; callers must assume that
7150 the rtx returned may not have mode TMODE.
7152 Note that TARGET may have neither TMODE nor MODE. In that case, it
7153 probably will not be used.
7155 If MODIFIER is EXPAND_SUM then when EXP is an addition
7156 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7157 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7158 products as above, or REG or MEM, or constant.
7159 Ordinarily in such cases we would output mul or add instructions
7160 and then return a pseudo reg containing the sum.
7162 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7163 it also marks a label as absolutely required (it can't be dead).
7164 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7165 This is used for outputting expressions used in initializers.
7167 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7168 with a constant address even if that address is not normally legitimate.
7169 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7171 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7172 a call parameter. Such targets require special care as we haven't yet
7173 marked TARGET so that it's safe from being trashed by libcalls. We
7174 don't want to use TARGET for anything but the final result;
7175 intermediate values must go elsewhere. Additionally, calls to
7176 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7178 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7179 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7180 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7181 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7185 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7186 enum expand_modifier modifier, rtx *alt_rtl)
7190 /* Handle ERROR_MARK before anybody tries to access its type. */
7191 if (TREE_CODE (exp) == ERROR_MARK
7192 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7194 ret = CONST0_RTX (tmode);
7195 return ret ? ret : const0_rtx;
7198 /* If this is an expression of some kind and it has an associated line
7199 number, then emit the line number before expanding the expression.
7201 We need to save and restore the file and line information so that
7202 errors discovered during expansion are emitted with the right
7203 information. It would be better if the diagnostic routines
7204 used the file/line information embedded in the tree nodes rather
7206 if (cfun && EXPR_HAS_LOCATION (exp))
7208 location_t saved_location = input_location;
7209 location_t saved_curr_loc = get_curr_insn_source_location ();
7210 tree saved_block = get_curr_insn_block ();
7211 input_location = EXPR_LOCATION (exp);
7212 set_curr_insn_source_location (input_location);
7214 /* Record where the insns produced belong. */
7215 set_curr_insn_block (TREE_BLOCK (exp));
7217 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7219 input_location = saved_location;
7220 set_curr_insn_block (saved_block);
7221 set_curr_insn_source_location (saved_curr_loc);
7225 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7232 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7233 enum expand_modifier modifier)
7235 rtx op0, op1, op2, temp;
7238 enum machine_mode mode;
7239 enum tree_code code = ops->code;
7241 rtx subtarget, original_target;
7243 bool reduce_bit_field;
7244 location_t loc = ops->location;
7245 tree treeop0, treeop1;
7246 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7247 ? reduce_to_bit_field_precision ((expr), \
7253 mode = TYPE_MODE (type);
7254 unsignedp = TYPE_UNSIGNED (type);
7259 /* We should be called only on simple (binary or unary) expressions,
7260 exactly those that are valid in gimple expressions that aren't
7261 GIMPLE_SINGLE_RHS (or invalid). */
7262 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7263 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
7264 || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
7266 ignore = (target == const0_rtx
7267 || ((CONVERT_EXPR_CODE_P (code)
7268 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7269 && TREE_CODE (type) == VOID_TYPE));
7271 /* We should be called only if we need the result. */
7272 gcc_assert (!ignore);
7274 /* An operation in what may be a bit-field type needs the
7275 result to be reduced to the precision of the bit-field type,
7276 which is narrower than that of the type's mode. */
7277 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7278 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7280 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7283 /* Use subtarget as the target for operand 0 of a binary operation. */
7284 subtarget = get_subtarget (target);
7285 original_target = target;
7289 case NON_LVALUE_EXPR:
7292 if (treeop0 == error_mark_node)
7295 if (TREE_CODE (type) == UNION_TYPE)
7297 tree valtype = TREE_TYPE (treeop0);
7299 /* If both input and output are BLKmode, this conversion isn't doing
7300 anything except possibly changing the memory attributes. */
7301 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7303 rtx result = expand_expr (treeop0, target, tmode,
7306 result = copy_rtx (result);
7307 set_mem_attributes (result, type, 0);
7313 if (TYPE_MODE (type) != BLKmode)
7314 target = gen_reg_rtx (TYPE_MODE (type));
7316 target = assign_temp (type, 0, 1, 1);
7320 /* Store data into beginning of memory target. */
7321 store_expr (treeop0,
7322 adjust_address (target, TYPE_MODE (valtype), 0),
7323 modifier == EXPAND_STACK_PARM,
7328 gcc_assert (REG_P (target));
7330 /* Store this field into a union of the proper type. */
7331 store_field (target,
7332 MIN ((int_size_in_bytes (TREE_TYPE
7335 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7336 0, TYPE_MODE (valtype), treeop0,
7340 /* Return the entire union. */
7344 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7346 op0 = expand_expr (treeop0, target, VOIDmode,
7349 /* If the signedness of the conversion differs and OP0 is
7350 a promoted SUBREG, clear that indication since we now
7351 have to do the proper extension. */
7352 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7353 && GET_CODE (op0) == SUBREG)
7354 SUBREG_PROMOTED_VAR_P (op0) = 0;
7356 return REDUCE_BIT_FIELD (op0);
7359 op0 = expand_expr (treeop0, NULL_RTX, mode,
7360 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7361 if (GET_MODE (op0) == mode)
7364 /* If OP0 is a constant, just convert it into the proper mode. */
7365 else if (CONSTANT_P (op0))
7367 tree inner_type = TREE_TYPE (treeop0);
7368 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7370 if (modifier == EXPAND_INITIALIZER)
7371 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7372 subreg_lowpart_offset (mode,
7375 op0 = convert_modes (mode, inner_mode, op0,
7376 TYPE_UNSIGNED (inner_type));
7379 else if (modifier == EXPAND_INITIALIZER)
7380 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7382 else if (target == 0)
7383 op0 = convert_to_mode (mode, op0,
7384 TYPE_UNSIGNED (TREE_TYPE
7388 convert_move (target, op0,
7389 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7393 return REDUCE_BIT_FIELD (op0);
7395 case ADDR_SPACE_CONVERT_EXPR:
7397 tree treeop0_type = TREE_TYPE (treeop0);
7399 addr_space_t as_from;
7401 gcc_assert (POINTER_TYPE_P (type));
7402 gcc_assert (POINTER_TYPE_P (treeop0_type));
7404 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7405 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7407 /* Conversions between pointers to the same address space should
7408 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7409 gcc_assert (as_to != as_from);
7411 /* Ask target code to handle conversion between pointers
7412 to overlapping address spaces. */
7413 if (targetm.addr_space.subset_p (as_to, as_from)
7414 || targetm.addr_space.subset_p (as_from, as_to))
7416 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7417 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7422 /* For disjoint address spaces, converting anything but
7423 a null pointer invokes undefined behaviour. We simply
7424 always return a null pointer here. */
7425 return CONST0_RTX (mode);
7428 case POINTER_PLUS_EXPR:
7429 /* Even though the sizetype mode and the pointer's mode can be different,
7430 expand is able to handle this correctly and get the correct result out
7431 of the PLUS_EXPR code. */
7432 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7433 if sizetype precision is smaller than pointer precision. */
7434 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7435 treeop1 = fold_convert_loc (loc, type,
7436 fold_convert_loc (loc, ssizetype,
7439 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7440 something else, make sure we add the register to the constant and
7441 then to the other thing. This case can occur during strength
7442 reduction and doing it this way will produce better code if the
7443 frame pointer or argument pointer is eliminated.
7445 fold-const.c will ensure that the constant is always in the inner
7446 PLUS_EXPR, so the only case we need to do anything about is if
7447 sp, ap, or fp is our second argument, in which case we must swap
7448 the innermost first argument and our second argument. */
7450 if (TREE_CODE (treeop0) == PLUS_EXPR
7451 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7452 && TREE_CODE (treeop1) == VAR_DECL
7453 && (DECL_RTL (treeop1) == frame_pointer_rtx
7454 || DECL_RTL (treeop1) == stack_pointer_rtx
7455 || DECL_RTL (treeop1) == arg_pointer_rtx))
7459 treeop1 = TREE_OPERAND (treeop0, 0);
7460 TREE_OPERAND (treeop0, 0) = t;
7463 /* If the result is to be ptr_mode and we are adding an integer to
7464 something, we might be forming a constant. So try to use
7465 plus_constant. If it produces a sum and we can't accept it,
7466 use force_operand. This allows P = &ARR[const] to generate
7467 efficient code on machines where a SYMBOL_REF is not a valid
7470 If this is an EXPAND_SUM call, always return the sum. */
7471 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7472 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7474 if (modifier == EXPAND_STACK_PARM)
7476 if (TREE_CODE (treeop0) == INTEGER_CST
7477 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7478 && TREE_CONSTANT (treeop1))
7482 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7484 /* Use immed_double_const to ensure that the constant is
7485 truncated according to the mode of OP1, then sign extended
7486 to a HOST_WIDE_INT. Using the constant directly can result
7487 in non-canonical RTL in a 64x32 cross compile. */
7489 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7491 TYPE_MODE (TREE_TYPE (treeop1)));
7492 op1 = plus_constant (op1, INTVAL (constant_part));
7493 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7494 op1 = force_operand (op1, target);
7495 return REDUCE_BIT_FIELD (op1);
7498 else if (TREE_CODE (treeop1) == INTEGER_CST
7499 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7500 && TREE_CONSTANT (treeop0))
7504 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7505 (modifier == EXPAND_INITIALIZER
7506 ? EXPAND_INITIALIZER : EXPAND_SUM));
7507 if (! CONSTANT_P (op0))
7509 op1 = expand_expr (treeop1, NULL_RTX,
7510 VOIDmode, modifier);
7511 /* Return a PLUS if modifier says it's OK. */
7512 if (modifier == EXPAND_SUM
7513 || modifier == EXPAND_INITIALIZER)
7514 return simplify_gen_binary (PLUS, mode, op0, op1);
7517 /* Use immed_double_const to ensure that the constant is
7518 truncated according to the mode of OP1, then sign extended
7519 to a HOST_WIDE_INT. Using the constant directly can result
7520 in non-canonical RTL in a 64x32 cross compile. */
7522 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7524 TYPE_MODE (TREE_TYPE (treeop0)));
7525 op0 = plus_constant (op0, INTVAL (constant_part));
7526 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7527 op0 = force_operand (op0, target);
7528 return REDUCE_BIT_FIELD (op0);
7532 /* No sense saving up arithmetic to be done
7533 if it's all in the wrong mode to form part of an address.
7534 And force_operand won't know whether to sign-extend or
7536 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7537 || mode != ptr_mode)
7539 expand_operands (treeop0, treeop1,
7540 subtarget, &op0, &op1, EXPAND_NORMAL);
7541 if (op0 == const0_rtx)
7543 if (op1 == const0_rtx)
7548 expand_operands (treeop0, treeop1,
7549 subtarget, &op0, &op1, modifier);
7550 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
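/* Illustrative example, not from the original sources: under
   EXPAND_SUM the code above may hand back a bare rtx such as
   (plus (symbol_ref "arr") (const_int 40)) for address arithmetic
   like &arr[10] with 4-byte elements, leaving it to the caller to
   fold the constant into an addressing mode instead of emitting an
   add insn.  */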
7553 /* For initializers, we are allowed to return a MINUS of two
7554 symbolic constants. Here we handle all cases when both operands
7556 /* Handle difference of two symbolic constants,
7557 for the sake of an initializer. */
7558 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7559 && really_constant_p (treeop0)
7560 && really_constant_p (treeop1))
7562 expand_operands (treeop0, treeop1,
7563 NULL_RTX, &op0, &op1, modifier);
7565 /* If the last operand is a CONST_INT, use plus_constant of
7566 the negated constant. Else make the MINUS. */
7567 if (CONST_INT_P (op1))
7568 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7570 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7573 /* No sense saving up arithmetic to be done
7574 if it's all in the wrong mode to form part of an address.
7575 And force_operand won't know whether to sign-extend or
7577 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7578 || mode != ptr_mode)
7581 expand_operands (treeop0, treeop1,
7582 subtarget, &op0, &op1, modifier);
7584 /* Convert A - const to A + (-const). */
7585 if (CONST_INT_P (op1))
7587 op1 = negate_rtx (mode, op1);
7588 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7593 case WIDEN_MULT_PLUS_EXPR:
7594 case WIDEN_MULT_MINUS_EXPR:
7595 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
7596 op2 = expand_normal (ops->op2);
7597 target = expand_widen_pattern_expr (ops, op0, op1, op2,
7601 case WIDEN_MULT_EXPR:
7602 /* If the first operand is constant, swap them.
7603 Thus the following special case checks need only
7604 check the second operand. */
7605 if (TREE_CODE (treeop0) == INTEGER_CST)
7612 /* First, check if we have a multiplication of one signed and one
7613 unsigned operand. */
7614 if (TREE_CODE (treeop1) != INTEGER_CST
7615 && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
7616 != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
7618 enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
7619 this_optab = usmul_widen_optab;
7620 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7622 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7624 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7625 expand_operands (treeop0, treeop1, subtarget, &op0, &op1,
7628 expand_operands (treeop0, treeop1, subtarget, &op1, &op0,
7634 /* Check for a multiplication with matching signedness. */
7635 else if ((TREE_CODE (treeop1) == INTEGER_CST
7636 && int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
7637 || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
7638 == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
7640 tree op0type = TREE_TYPE (treeop0);
7641 enum machine_mode innermode = TYPE_MODE (op0type);
7642 bool zextend_p = TYPE_UNSIGNED (op0type);
7643 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7644 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7646 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7648 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7650 expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
7652 temp = expand_widening_mult (mode, op0, op1, target,
7653 unsignedp, this_optab);
7654 return REDUCE_BIT_FIELD (temp);
7656 if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
7657 && innermode == word_mode)
7660 op0 = expand_normal (treeop0);
7661 if (TREE_CODE (treeop1) == INTEGER_CST)
7662 op1 = convert_modes (innermode, mode,
7663 expand_normal (treeop1), unsignedp);
7665 op1 = expand_normal (treeop1);
7666 temp = expand_binop (mode, other_optab, op0, op1, target,
7667 unsignedp, OPTAB_LIB_WIDEN);
7668 hipart = gen_highpart (innermode, temp);
7669 htem = expand_mult_highpart_adjust (innermode, hipart,
7673 emit_move_insn (hipart, htem);
7674 return REDUCE_BIT_FIELD (temp);
7678 treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
7679 treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
7680 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7681 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7684 /* If this is a fixed-point operation, then we cannot use the code
7685 below because "expand_mult" doesn't support sat/no-sat fixed-point
7687 if (ALL_FIXED_POINT_MODE_P (mode))
7690 /* If the first operand is constant, swap them.
7691 Thus the following special case checks need only
7692 check the second operand. */
7693 if (TREE_CODE (treeop0) == INTEGER_CST)
7700 /* Attempt to return something suitable for generating an
7701 indexed address, for machines that support that. */
7703 if (modifier == EXPAND_SUM && mode == ptr_mode
7704 && host_integerp (treeop1, 0))
7706 tree exp1 = treeop1;
7708 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7712 op0 = force_operand (op0, NULL_RTX);
7714 op0 = copy_to_mode_reg (mode, op0);
7716 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7717 gen_int_mode (tree_low_cst (exp1, 0),
7718 TYPE_MODE (TREE_TYPE (exp1)))));
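/* Illustrative example, not from the original sources: under
   EXPAND_SUM the branch above can return (mult (reg i) (const_int 4))
   for "i * 4", which address generation can later fold into an
   indexed memory operand on machines that support one.  */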
7721 if (modifier == EXPAND_STACK_PARM)
7724 expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
7725 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7727 case TRUNC_DIV_EXPR:
7728 case FLOOR_DIV_EXPR:
7730 case ROUND_DIV_EXPR:
7731 case EXACT_DIV_EXPR:
7732 /* If this is a fixed-point operation, then we cannot use the code
7733 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7735 if (ALL_FIXED_POINT_MODE_P (mode))
7738 if (modifier == EXPAND_STACK_PARM)
7740 /* Possible optimization: compute the dividend with EXPAND_SUM
7741 then, if the divisor is constant, we can optimize the case
7742 where some terms of the dividend have coefficients divisible by it. */
7743 expand_operands (treeop0, treeop1,
7744 subtarget, &op0, &op1, EXPAND_NORMAL);
7745 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7750 case TRUNC_MOD_EXPR:
7751 case FLOOR_MOD_EXPR:
7753 case ROUND_MOD_EXPR:
7754 if (modifier == EXPAND_STACK_PARM)
7756 expand_operands (treeop0, treeop1,
7757 subtarget, &op0, &op1, EXPAND_NORMAL);
7758 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7760 case FIXED_CONVERT_EXPR:
7761 op0 = expand_normal (treeop0);
7762 if (target == 0 || modifier == EXPAND_STACK_PARM)
7763 target = gen_reg_rtx (mode);
7765 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7766 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7767 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7768 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7770 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7773 case FIX_TRUNC_EXPR:
7774 op0 = expand_normal (treeop0);
7775 if (target == 0 || modifier == EXPAND_STACK_PARM)
7776 target = gen_reg_rtx (mode);
7777 expand_fix (target, op0, unsignedp);
7781 op0 = expand_normal (treeop0);
7782 if (target == 0 || modifier == EXPAND_STACK_PARM)
7783 target = gen_reg_rtx (mode);
7784 /* expand_float can't figure out what to do if FROM has VOIDmode.
7785 So give it the correct mode. With -O, cse will optimize this. */
7786 if (GET_MODE (op0) == VOIDmode)
7787 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7789 expand_float (target, op0,
7790 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7794 op0 = expand_expr (treeop0, subtarget,
7795 VOIDmode, EXPAND_NORMAL);
7796 if (modifier == EXPAND_STACK_PARM)
7798 temp = expand_unop (mode,
7799 optab_for_tree_code (NEGATE_EXPR, type,
7803 return REDUCE_BIT_FIELD (temp);
7806 op0 = expand_expr (treeop0, subtarget,
7807 VOIDmode, EXPAND_NORMAL);
7808 if (modifier == EXPAND_STACK_PARM)
7811 /* ABS_EXPR is not valid for complex arguments. */
7812 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7813 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7815 /* Unsigned abs is simply the operand. Testing here means we don't
7816 risk generating incorrect code below. */
7817 if (TYPE_UNSIGNED (type))
7820 return expand_abs (mode, op0, target, unsignedp,
7821 safe_from_p (target, treeop0, 1));
7825 target = original_target;
7827 || modifier == EXPAND_STACK_PARM
7828 || (MEM_P (target) && MEM_VOLATILE_P (target))
7829 || GET_MODE (target) != mode
7831 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7832 target = gen_reg_rtx (mode);
7833 expand_operands (treeop0, treeop1,
7834 target, &op0, &op1, EXPAND_NORMAL);
7836 /* First try to do it with a special MIN or MAX instruction.
7837 If that does not win, use a conditional jump to select the proper
7839 this_optab = optab_for_tree_code (code, type, optab_default);
7840 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7845 /* At this point, a MEM target is no longer useful; we will get better
7848 if (! REG_P (target))
7849 target = gen_reg_rtx (mode);
7851 /* If op1 was placed in target, swap op0 and op1. */
7852 if (target != op0 && target == op1)
7859 /* We generate better code and avoid problems with op1 mentioning
7860 target by forcing op1 into a pseudo if it isn't a constant. */
7861 if (! CONSTANT_P (op1))
7862 op1 = force_reg (mode, op1);
7865 enum rtx_code comparison_code;
7868 if (code == MAX_EXPR)
7869 comparison_code = unsignedp ? GEU : GE;
7871 comparison_code = unsignedp ? LEU : LE;
7873 /* Canonicalize to comparisons against 0. */
7874 if (op1 == const1_rtx)
7876 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7877 or (a != 0 ? a : 1) for unsigned.
7878 For MIN we are safe converting (a <= 1 ? a : 1)
7879 into (a <= 0 ? a : 1) */
7880 cmpop1 = const0_rtx;
7881 if (code == MAX_EXPR)
7882 comparison_code = unsignedp ? NE : GT;
7884 if (op1 == constm1_rtx && !unsignedp)
7886 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7887 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7888 cmpop1 = const0_rtx;
7889 if (code == MIN_EXPR)
7890 comparison_code = LT;
7892 #ifdef HAVE_conditional_move
7893 /* Use a conditional move if possible. */
7894 if (can_conditionally_move_p (mode))
7898 /* ??? Same problem as in expmed.c: emit_conditional_move
7899 forces a stack adjustment via compare_from_rtx, and we
7900 lose the stack adjustment if the sequence we are about
7901 to create is discarded. */
7902 do_pending_stack_adjust ();
7906 /* Try to emit the conditional move. */
7907 insn = emit_conditional_move (target, comparison_code,
7912 /* If we could do the conditional move, emit the sequence,
7916 rtx seq = get_insns ();
7922 /* Otherwise discard the sequence and fall back to code with
7928 emit_move_insn (target, op0);
7930 temp = gen_label_rtx ();
7931 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
7932 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
7935 emit_move_insn (target, op1);
7940 op0 = expand_expr (treeop0, subtarget,
7941 VOIDmode, EXPAND_NORMAL);
7942 if (modifier == EXPAND_STACK_PARM)
7944 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7948 /* ??? Can optimize bitwise operations with one arg constant.
7949 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7950 and (a bitwise1 b) bitwise2 b (etc)
7951 but that is probably not worth while. */
7953 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7954 boolean values when we want in all cases to compute both of them. In
7955 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7956 as actual zero-or-1 values and then bitwise anding. In cases where
7957 there cannot be any side effects, better code would be made by
7958 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7959 how to recognize those cases. */
7961 case TRUTH_AND_EXPR:
7962 code = BIT_AND_EXPR;
7967 code = BIT_IOR_EXPR;
7971 case TRUTH_XOR_EXPR:
7972 code = BIT_XOR_EXPR;
7978 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
7979 || (GET_MODE_PRECISION (TYPE_MODE (type))
7980 == TYPE_PRECISION (type)));
7985 /* If this is a fixed-point operation, then we cannot use the code
7986 below because "expand_shift" doesn't support sat/no-sat fixed-point
7988 if (ALL_FIXED_POINT_MODE_P (mode))
7991 if (! safe_from_p (subtarget, treeop1, 1))
7993 if (modifier == EXPAND_STACK_PARM)
7995 op0 = expand_expr (treeop0, subtarget,
7996 VOIDmode, EXPAND_NORMAL);
7997 temp = expand_shift (code, mode, op0, treeop1, target,
7999 if (code == LSHIFT_EXPR)
8000 temp = REDUCE_BIT_FIELD (temp);
8003 /* Could determine the answer when only additive constants differ. Also,
8004 the addition of one can be handled by changing the condition. */
8011 case UNORDERED_EXPR:
8019 temp = do_store_flag (ops,
8020 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8021 tmode != VOIDmode ? tmode : mode);
8025 /* Use a compare and a jump for BLKmode comparisons, or for function
8026 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8029 || modifier == EXPAND_STACK_PARM
8030 || ! safe_from_p (target, treeop0, 1)
8031 || ! safe_from_p (target, treeop1, 1)
8032 /* Make sure we don't have a hard reg (such as function's return
8033 value) live across basic blocks, if not optimizing. */
8034 || (!optimize && REG_P (target)
8035 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8036 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8038 emit_move_insn (target, const0_rtx);
8040 op1 = gen_label_rtx ();
8041 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8043 emit_move_insn (target, const1_rtx);
8048 case TRUTH_NOT_EXPR:
8049 if (modifier == EXPAND_STACK_PARM)
8051 op0 = expand_expr (treeop0, target,
8052 VOIDmode, EXPAND_NORMAL);
8053 /* The parser is careful to generate TRUTH_NOT_EXPR
8054 only with operands that are always zero or one. */
8055 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8056 target, 1, OPTAB_LIB_WIDEN);
8061 /* Get the rtx code of the operands. */
8062 op0 = expand_normal (treeop0);
8063 op1 = expand_normal (treeop1);
8066 target = gen_reg_rtx (TYPE_MODE (type));
8068 /* Move the real (op0) and imaginary (op1) parts to their location. */
8069 write_complex_part (target, op0, false);
8070 write_complex_part (target, op1, true);
8074 case WIDEN_SUM_EXPR:
8076 tree oprnd0 = treeop0;
8077 tree oprnd1 = treeop1;
8079 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8080 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8085 case REDUC_MAX_EXPR:
8086 case REDUC_MIN_EXPR:
8087 case REDUC_PLUS_EXPR:
8089 op0 = expand_normal (treeop0);
8090 this_optab = optab_for_tree_code (code, type, optab_default);
8091 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8096 case VEC_EXTRACT_EVEN_EXPR:
8097 case VEC_EXTRACT_ODD_EXPR:
8099 expand_operands (treeop0, treeop1,
8100 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8101 this_optab = optab_for_tree_code (code, type, optab_default);
8102 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8108 case VEC_INTERLEAVE_HIGH_EXPR:
8109 case VEC_INTERLEAVE_LOW_EXPR:
8111 expand_operands (treeop0, treeop1,
8112 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8113 this_optab = optab_for_tree_code (code, type, optab_default);
8114 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8120 case VEC_LSHIFT_EXPR:
8121 case VEC_RSHIFT_EXPR:
8123 target = expand_vec_shift_expr (ops, target);
8127 case VEC_UNPACK_HI_EXPR:
8128 case VEC_UNPACK_LO_EXPR:
8130 op0 = expand_normal (treeop0);
8131 this_optab = optab_for_tree_code (code, type, optab_default);
8132 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8138 case VEC_UNPACK_FLOAT_HI_EXPR:
8139 case VEC_UNPACK_FLOAT_LO_EXPR:
8141 op0 = expand_normal (treeop0);
8143 /* The signedness is determined from the input operand. */
8143 this_optab = optab_for_tree_code (code,
8144 TREE_TYPE (treeop0),
8146 temp = expand_widen_pattern_expr
8147 (ops, op0, NULL_RTX, NULL_RTX,
8148 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8154 case VEC_WIDEN_MULT_HI_EXPR:
8155 case VEC_WIDEN_MULT_LO_EXPR:
8157 tree oprnd0 = treeop0;
8158 tree oprnd1 = treeop1;
8160 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8161 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8163 gcc_assert (target);
8167 case VEC_PACK_TRUNC_EXPR:
8168 case VEC_PACK_SAT_EXPR:
8169 case VEC_PACK_FIX_TRUNC_EXPR:
8170 mode = TYPE_MODE (TREE_TYPE (treeop0));
8177 /* Here to do an ordinary binary operator. */
8179 expand_operands (treeop0, treeop1,
8180 subtarget, &op0, &op1, EXPAND_NORMAL);
8182 this_optab = optab_for_tree_code (code, type, optab_default);
8184 if (modifier == EXPAND_STACK_PARM)
8186 temp = expand_binop (mode, this_optab, op0, op1, target,
8187 unsignedp, OPTAB_LIB_WIDEN);
8189 return REDUCE_BIT_FIELD (temp);
8191 #undef REDUCE_BIT_FIELD
8194 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8195 enum expand_modifier modifier, rtx *alt_rtl)
8197 rtx op0, op1, temp, decl_rtl;
8200 enum machine_mode mode;
8201 enum tree_code code = TREE_CODE (exp);
8203 rtx subtarget, original_target;
8206 bool reduce_bit_field;
8207 location_t loc = EXPR_LOCATION (exp);
8208 struct separate_ops ops;
8209 tree treeop0, treeop1, treeop2;
8210 tree ssa_name = NULL_TREE;
8213 type = TREE_TYPE (exp);
8214 mode = TYPE_MODE (type);
8215 unsignedp = TYPE_UNSIGNED (type);
8217 treeop0 = treeop1 = treeop2 = NULL_TREE;
8218 if (!VL_EXP_CLASS_P (exp))
8219 switch (TREE_CODE_LENGTH (code))
8222 case 3: treeop2 = TREE_OPERAND (exp, 2);
8223 case 2: treeop1 = TREE_OPERAND (exp, 1);
8224 case 1: treeop0 = TREE_OPERAND (exp, 0);
8234 ignore = (target == const0_rtx
8235 || ((CONVERT_EXPR_CODE_P (code)
8236 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8237 && TREE_CODE (type) == VOID_TYPE));
8239 /* An operation in what may be a bit-field type needs the
8240 result to be reduced to the precision of the bit-field type,
8241 which is narrower than that of the type's mode. */
8242 reduce_bit_field = (!ignore
8243 && TREE_CODE (type) == INTEGER_TYPE
8244 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8246 /* If we are going to ignore this result, we need only do something
8247 if there is a side-effect somewhere in the expression. If there
8248 is, short-circuit the most common cases here. Note that we must
8249 not call expand_expr with anything but const0_rtx in case this
8250 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8254 if (! TREE_SIDE_EFFECTS (exp))
8257 /* Ensure we reference a volatile object even if value is ignored, but
8258 don't do this if all we are doing is taking its address. */
8259 if (TREE_THIS_VOLATILE (exp)
8260 && TREE_CODE (exp) != FUNCTION_DECL
8261 && mode != VOIDmode && mode != BLKmode
8262 && modifier != EXPAND_CONST_ADDRESS)
8264 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8266 temp = copy_to_reg (temp);
8270 if (TREE_CODE_CLASS (code) == tcc_unary
8271 || code == COMPONENT_REF || code == INDIRECT_REF)
8272 return expand_expr (treeop0, const0_rtx, VOIDmode,
8275 else if (TREE_CODE_CLASS (code) == tcc_binary
8276 || TREE_CODE_CLASS (code) == tcc_comparison
8277 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8279 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8280 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8283 else if (code == BIT_FIELD_REF)
8285 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8286 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8287 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8294 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8297 /* Use subtarget as the target for operand 0 of a binary operation. */
8298 subtarget = get_subtarget (target);
8299 original_target = target;
8305 tree function = decl_function_context (exp);
8307 temp = label_rtx (exp);
8308 temp = gen_rtx_LABEL_REF (Pmode, temp);
8310 if (function != current_function_decl
8312 LABEL_REF_NONLOCAL_P (temp) = 1;
8314 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8319 /* ??? ivopts calls the expander without any preparation from
8320 out-of-ssa. So fake instructions as if this were an access to the
8321 base variable. This unnecessarily allocates a pseudo; see whether
8322 we can reuse it if partition base vars have it set already. */
8323 if (!currently_expanding_to_rtl)
8324 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
8327 g = get_gimple_for_ssa_name (exp);
8329 return expand_expr_real (gimple_assign_rhs_to_tree (g), target, tmode,
8333 decl_rtl = get_rtx_for_ssa_name (ssa_name);
8334 exp = SSA_NAME_VAR (ssa_name);
8335 goto expand_decl_rtl;
8339 /* If a static var's type was incomplete when the decl was written,
8340 but the type is complete now, lay out the decl now. */
8341 if (DECL_SIZE (exp) == 0
8342 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8343 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8344 layout_decl (exp, 0);
8346 /* TLS emulation hook - replace __thread vars with
8347 *__emutls_get_address (&_emutls.var). */
8348 if (! targetm.have_tls
8349 && TREE_CODE (exp) == VAR_DECL
8350 && DECL_THREAD_LOCAL_P (exp))
8352 exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
8353 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
8356 /* ... fall through ... */
8360 decl_rtl = DECL_RTL (exp);
8362 gcc_assert (decl_rtl);
8363 decl_rtl = copy_rtx (decl_rtl);
8364 /* Record writes to register variables. */
8365 if (modifier == EXPAND_WRITE && REG_P (decl_rtl)
8366 && REGNO (decl_rtl) < FIRST_PSEUDO_REGISTER)
8368 int i = REGNO (decl_rtl);
8369 int nregs = hard_regno_nregs[i][GET_MODE (decl_rtl)];
8372 SET_HARD_REG_BIT (crtl->asm_clobbers, i);
8378 /* Ensure the variable is marked as used even if it doesn't go
8379 through a parser. If it hasn't been used yet, write out an external definition. */
8381 if (! TREE_USED (exp))
8383 assemble_external (exp);
8384 TREE_USED (exp) = 1;
8387 /* Show we haven't gotten RTL for this yet. */
8390 /* Variables inherited from containing functions should have
8391 been lowered by this point. */
8392 context = decl_function_context (exp);
8393 gcc_assert (!context
8394 || context == current_function_decl
8395 || TREE_STATIC (exp)
8396 /* ??? C++ creates functions that are not TREE_STATIC. */
8397 || TREE_CODE (exp) == FUNCTION_DECL);
8399 /* This is the case of an array whose size is to be determined
8400 from its initializer, while the initializer is still being parsed.
8403 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8404 temp = validize_mem (decl_rtl);
8406 /* If DECL_RTL is memory, we are in the normal case and the
8407 address is not valid, get the address into a register. */
8409 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8412 *alt_rtl = decl_rtl;
8413 decl_rtl = use_anchored_address (decl_rtl);
8414 if (modifier != EXPAND_CONST_ADDRESS
8415 && modifier != EXPAND_SUM
8416 && !memory_address_addr_space_p (DECL_MODE (exp),
8418 MEM_ADDR_SPACE (decl_rtl)))
8419 temp = replace_equiv_address (decl_rtl,
8420 copy_rtx (XEXP (decl_rtl, 0)));
8423 /* If we got something, return it. But first, set the alignment
8424 if the address is a register. */
8427 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8428 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8433 /* If the mode of DECL_RTL does not match that of the decl, it
8434 must be a promoted value. We return a SUBREG of the wanted mode,
8435 but mark it so that we know that it was already extended. */
8436 if (REG_P (decl_rtl) && GET_MODE (decl_rtl) != DECL_MODE (exp))
8438 enum machine_mode pmode;
8440 /* Get the signedness to be used for this variable. Ensure we get
8441 the same mode we got when the variable was declared. */
8442 if (code == SSA_NAME
8443 && (g = SSA_NAME_DEF_STMT (ssa_name))
8444 && gimple_code (g) == GIMPLE_CALL)
8445 pmode = promote_function_mode (type, mode, &unsignedp,
8447 (TREE_TYPE (gimple_call_fn (g))),
8450 pmode = promote_decl_mode (exp, &unsignedp);
8451 gcc_assert (GET_MODE (decl_rtl) == pmode);
8453 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8454 SUBREG_PROMOTED_VAR_P (temp) = 1;
8455 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
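/* Editorial illustration (not from the original source): on a target
   that promotes subword values, a "short" local may be assigned an
   SImode register even though DECL_MODE is HImode. The code above
   then hands back something like (subreg:HI (reg:SI N) 0) (the
   little-endian case) with SUBREG_PROMOTED_VAR_P set, letting later
   passes drop redundant sign/zero extensions.  */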
8462 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8463 TREE_INT_CST_HIGH (exp), mode);
8469 tree tmp = NULL_TREE;
8470 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8471 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8472 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8473 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8474 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8475 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8476 return const_vector_from_tree (exp);
8477 if (GET_MODE_CLASS (mode) == MODE_INT)
8479 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8481 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8484 tmp = build_constructor_from_list (type,
8485 TREE_VECTOR_CST_ELTS (exp));
8486 return expand_expr (tmp, ignore ? const0_rtx : target,
8491 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8494 /* If optimized, generate immediate CONST_DOUBLE
8495 which will be turned into memory by reload if necessary.
8497 We used to force a register so that loop.c could see it. But
8498 this does not allow gen_* patterns to perform optimizations with
8499 the constants. It also produces two insns in cases like "x = 1.0;".
8500 On most machines, floating-point constants are not permitted in
8501 many insns, so we'd end up copying it to a register in any case.
8503 Now, we do the copying in expand_binop, if appropriate. */
8504 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8505 TYPE_MODE (TREE_TYPE (exp)));
8508 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8509 TYPE_MODE (TREE_TYPE (exp)));
8512 /* Handle evaluating a complex constant in a CONCAT target. */
8513 if (original_target && GET_CODE (original_target) == CONCAT)
8515 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8518 rtarg = XEXP (original_target, 0);
8519 itarg = XEXP (original_target, 1);
8521 /* Move the real and imaginary parts separately. */
8522 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8523 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8526 emit_move_insn (rtarg, op0);
8528 emit_move_insn (itarg, op1);
8530 return original_target;
8533 /* ... fall through ... */
8536 temp = expand_expr_constant (exp, 1, modifier);
8538 /* temp contains a constant address.
8539 On RISC machines where a constant address isn't valid,
8540 make some insns to get that address into a register. */
8541 if (modifier != EXPAND_CONST_ADDRESS
8542 && modifier != EXPAND_INITIALIZER
8543 && modifier != EXPAND_SUM
8544 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8545 MEM_ADDR_SPACE (temp)))
8546 return replace_equiv_address (temp,
8547 copy_rtx (XEXP (temp, 0)));
8553 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8555 if (!SAVE_EXPR_RESOLVED_P (exp))
8557 /* We can indeed still hit this case, typically via builtin
8558 expanders calling save_expr immediately before expanding
8559 something. Assume this means that we only have to deal
8560 with non-BLKmode values. */
8561 gcc_assert (GET_MODE (ret) != BLKmode);
8563 val = build_decl (EXPR_LOCATION (exp),
8564 VAR_DECL, NULL, TREE_TYPE (exp));
8565 DECL_ARTIFICIAL (val) = 1;
8566 DECL_IGNORED_P (val) = 1;
8568 TREE_OPERAND (exp, 0) = treeop0;
8569 SAVE_EXPR_RESOLVED_P (exp) = 1;
8571 if (!CONSTANT_P (ret))
8572 ret = copy_to_reg (ret);
8573 SET_DECL_RTL (val, ret);
8581 /* If we don't need the result, just ensure we evaluate any subexpressions. */
8585 unsigned HOST_WIDE_INT idx;
8588 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8589 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8594 return expand_constructor (exp, target, modifier, false);
8596 case MISALIGNED_INDIRECT_REF:
8597 case ALIGN_INDIRECT_REF:
8600 tree exp1 = treeop0;
8601 addr_space_t as = ADDR_SPACE_GENERIC;
8602 enum machine_mode address_mode = Pmode;
8604 if (modifier != EXPAND_WRITE)
8608 t = fold_read_from_constant_string (exp);
8610 return expand_expr (t, target, tmode, modifier);
8613 if (POINTER_TYPE_P (TREE_TYPE (exp1)))
8615 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1)));
8616 address_mode = targetm.addr_space.address_mode (as);
8619 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
8620 op0 = memory_address_addr_space (mode, op0, as);
8622 if (code == ALIGN_INDIRECT_REF)
8624 int align = TYPE_ALIGN_UNIT (type);
8625 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
8626 op0 = memory_address_addr_space (mode, op0, as);
8629 temp = gen_rtx_MEM (mode, op0);
8631 set_mem_attributes (temp, exp, 0);
8632 set_mem_addr_space (temp, as);
8634 /* Resolve the misalignment now, so that we don't have to remember
8635 to resolve it later. Of course, this only works for reads. */
8636 if (code == MISALIGNED_INDIRECT_REF)
8641 gcc_assert (modifier == EXPAND_NORMAL
8642 || modifier == EXPAND_STACK_PARM);
8644 /* The vectorizer should have already checked the mode. */
8645 icode = optab_handler (movmisalign_optab, mode)->insn_code;
8646 gcc_assert (icode != CODE_FOR_nothing);
8648 /* We've already validated the memory, and we're creating a
8649 new pseudo destination. The predicates really can't fail. */
8650 reg = gen_reg_rtx (mode);
8652 /* Nor can the insn generator. */
8653 insn = GEN_FCN (icode) (reg, temp);
8662 case TARGET_MEM_REF:
8664 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8665 struct mem_address addr;
8668 get_address_description (exp, &addr);
8669 op0 = addr_for_mem_ref (&addr, as, true);
8670 op0 = memory_address_addr_space (mode, op0, as);
8671 temp = gen_rtx_MEM (mode, op0);
8672 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
8673 set_mem_addr_space (temp, as);
8674 base = get_base_address (TMR_ORIGINAL (exp));
8675 if (INDIRECT_REF_P (base)
8677 && TREE_CODE (TMR_BASE (exp)) == SSA_NAME
8678 && POINTER_TYPE_P (TREE_TYPE (TMR_BASE (exp))))
8680 set_mem_expr (temp, build1 (INDIRECT_REF,
8681 TREE_TYPE (exp), TMR_BASE (exp)));
8682 set_mem_offset (temp, NULL_RTX);
8690 tree array = treeop0;
8691 tree index = treeop1;
8693 /* Fold an expression like: "foo"[2].
8694 This is not done in fold so it won't happen inside &.
8695 Don't fold if this is for wide characters since it's too
8696 difficult to do correctly and this is a very rare case. */
8698 if (modifier != EXPAND_CONST_ADDRESS
8699 && modifier != EXPAND_INITIALIZER
8700 && modifier != EXPAND_MEMORY)
8702 tree t = fold_read_from_constant_string (exp);
8705 return expand_expr (t, target, tmode, modifier);
8708 /* If this is a constant index into a constant array,
8709 just get the value from the array. Handle both the cases when
8710 we have an explicit constructor and when our operand is a variable
8711 that was declared const. */
8713 if (modifier != EXPAND_CONST_ADDRESS
8714 && modifier != EXPAND_INITIALIZER
8715 && modifier != EXPAND_MEMORY
8716 && TREE_CODE (array) == CONSTRUCTOR
8717 && ! TREE_SIDE_EFFECTS (array)
8718 && TREE_CODE (index) == INTEGER_CST)
8720 unsigned HOST_WIDE_INT ix;
8723 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8725 if (tree_int_cst_equal (field, index))
8727 if (!TREE_SIDE_EFFECTS (value))
8728 return expand_expr (fold (value), target, tmode, modifier);
8733 else if (optimize >= 1
8734 && modifier != EXPAND_CONST_ADDRESS
8735 && modifier != EXPAND_INITIALIZER
8736 && modifier != EXPAND_MEMORY
8737 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8738 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8739 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8740 && targetm.binds_local_p (array))
8742 if (TREE_CODE (index) == INTEGER_CST)
8744 tree init = DECL_INITIAL (array);
8746 if (TREE_CODE (init) == CONSTRUCTOR)
8748 unsigned HOST_WIDE_INT ix;
8751 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8753 if (tree_int_cst_equal (field, index))
8755 if (TREE_SIDE_EFFECTS (value))
8758 if (TREE_CODE (value) == CONSTRUCTOR)
8760 /* If VALUE is a CONSTRUCTOR, this
8761 optimization is only useful if
8762 this doesn't store the CONSTRUCTOR
8763 into memory. If it does, it is more
8764 efficient to just load the data from
8765 the array directly. */
8766 rtx ret = expand_constructor (value, target,
8768 if (ret == NULL_RTX)
8772 return expand_expr (fold (value), target, tmode,
8776 else if (TREE_CODE (init) == STRING_CST
8778 tree index1 = index;
8779 tree low_bound = array_ref_low_bound (exp);
8780 index1 = fold_convert_loc (loc, sizetype,
8783 /* Optimize the special-case of a zero lower bound.
8785 We convert the low_bound to sizetype to avoid some problems
8786 with constant folding. (E.g. suppose the lower bound is 1,
8787 and its mode is QI. Without the conversion, (ARRAY
8788 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8789 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8791 if (! integer_zerop (low_bound))
8792 index1 = size_diffop_loc (loc, index1,
8793 fold_convert_loc (loc, sizetype,
8796 if (0 > compare_tree_int (index1,
8797 TREE_STRING_LENGTH (init)))
8799 tree type = TREE_TYPE (TREE_TYPE (init));
8800 enum machine_mode mode = TYPE_MODE (type);
8802 if (GET_MODE_CLASS (mode) == MODE_INT
8803 && GET_MODE_SIZE (mode) == 1)
8804 return gen_int_mode (TREE_STRING_POINTER (init)
8805 [TREE_INT_CST_LOW (index1)],
8812 goto normal_inner_ref;
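/* Editorial illustration (not from the original source): with
   optimization enabled, given static const char s[] = "abc", the
   read s[1] satisfies the checks above (read-only, locally bound,
   constant index within TREE_STRING_LENGTH), so it expands straight
   to the QImode constant 98 ('b') via gen_int_mode, with no memory
   reference emitted.  */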
8815 /* If the operand is a CONSTRUCTOR, we can just extract the
8816 appropriate field if it is present. */
8817 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8819 unsigned HOST_WIDE_INT idx;
8822 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8824 if (field == treeop1
8825 /* We can normally use the value of the field in the
8826 CONSTRUCTOR. However, if this is a bitfield in
8827 an integral mode that we can fit in a HOST_WIDE_INT,
8828 we must mask only the number of bits in the bitfield,
8829 since this is done implicitly by the constructor. If
8830 the bitfield does not meet either of those conditions,
8831 we can't do this optimization. */
8832 && (! DECL_BIT_FIELD (field)
8833 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8834 && (GET_MODE_BITSIZE (DECL_MODE (field))
8835 <= HOST_BITS_PER_WIDE_INT))))
8837 if (DECL_BIT_FIELD (field)
8838 && modifier == EXPAND_STACK_PARM)
8840 op0 = expand_expr (value, target, tmode, modifier);
8841 if (DECL_BIT_FIELD (field))
8843 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8844 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8846 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8848 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8849 op0 = expand_and (imode, op0, op1, target);
8854 = build_int_cst (NULL_TREE,
8855 GET_MODE_BITSIZE (imode) - bitsize);
8857 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8859 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8867 goto normal_inner_ref;
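/* Editorial illustration (not from the original source): for a
   CONSTRUCTOR such as { 5, -2 } initializing
   struct { unsigned u : 3; int s : 3; }, reading .u above masks the
   value with ((HOST_WIDE_INT) 1 << 3) - 1 = 7, while reading .s
   shifts left and then arithmetic-right by
   GET_MODE_BITSIZE (imode) - 3 so the field's sign bit is
   propagated.  */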
8870 case ARRAY_RANGE_REF:
8873 enum machine_mode mode1, mode2;
8874 HOST_WIDE_INT bitsize, bitpos;
8876 int volatilep = 0, must_force_mem;
8877 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8878 &mode1, &unsignedp, &volatilep, true);
8879 rtx orig_op0, memloc;
8881 /* If we got back the original object, something is wrong. Perhaps
8882 we are evaluating an expression too early. In any event, don't
8883 infinitely recurse. */
8884 gcc_assert (tem != exp);
8886 /* If TEM's type is a union of variable size, pass TARGET to the inner
8887 computation, since it will need a temporary and TARGET is known
8888 to be usable as one. This occurs in unchecked conversion in Ada. */
8891 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8892 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8894 && modifier != EXPAND_STACK_PARM
8895 ? target : NULL_RTX),
8897 (modifier == EXPAND_INITIALIZER
8898 || modifier == EXPAND_CONST_ADDRESS
8899 || modifier == EXPAND_STACK_PARM)
8900 ? modifier : EXPAND_NORMAL);
8903 /* If the bitfield is volatile, we want to access it in the
8904 field's mode, not the computed mode. */
8906 && GET_CODE (op0) == MEM
8907 && flag_strict_volatile_bitfields > 0)
8908 op0 = adjust_address (op0, mode1, 0);
8911 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
8913 /* If we have either an offset, a BLKmode result, or a reference
8914 outside the underlying object, we must force it to memory.
8915 Such a case can occur in Ada if we have unchecked conversion
8916 of an expression from a scalar type to an aggregate type or
8917 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
8918 passed a partially uninitialized object or a view-conversion
8919 to a larger size. */
8920 must_force_mem = (offset
8922 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
8924 /* Handle CONCAT first. */
8925 if (GET_CODE (op0) == CONCAT && !must_force_mem)
8928 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
8931 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8934 op0 = XEXP (op0, 0);
8935 mode2 = GET_MODE (op0);
8937 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8938 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
8942 op0 = XEXP (op0, 1);
8944 mode2 = GET_MODE (op0);
8947 /* Otherwise force into memory. */
8951 /* If this is a constant, put it in a register if it is a legitimate
8952 constant and we don't need a memory reference. */
8953 if (CONSTANT_P (op0)
8955 && LEGITIMATE_CONSTANT_P (op0)
8957 op0 = force_reg (mode2, op0);
8959 /* Otherwise, if this is a constant, try to force it to the constant
8960 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
8961 is a legitimate constant. */
8962 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
8963 op0 = validize_mem (memloc);
8965 /* Otherwise, if this is a constant or the object is not in memory
8966 and needs to be, put it there. */
8967 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
8969 tree nt = build_qualified_type (TREE_TYPE (tem),
8970 (TYPE_QUALS (TREE_TYPE (tem))
8971 | TYPE_QUAL_CONST));
8972 memloc = assign_temp (nt, 1, 1, 1);
8973 emit_move_insn (memloc, op0);
8979 enum machine_mode address_mode;
8980 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
8983 gcc_assert (MEM_P (op0));
8986 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
8987 if (GET_MODE (offset_rtx) != address_mode)
8988 offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
8990 if (GET_MODE (op0) == BLKmode
8991 /* A constant address in OP0 can have VOIDmode; we must
8992 not try to call force_reg in that case. */
8993 && GET_MODE (XEXP (op0, 0)) != VOIDmode
8995 && (bitpos % bitsize) == 0
8996 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
8997 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
8999 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9003 op0 = offset_address (op0, offset_rtx,
9004 highest_pow2_factor (offset));
9007 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9008 record its alignment as BIGGEST_ALIGNMENT. */
9009 if (MEM_P (op0) && bitpos == 0 && offset != 0
9010 && is_aligning_offset (offset, tem))
9011 set_mem_align (op0, BIGGEST_ALIGNMENT);
9013 /* Don't forget about volatility even if this is a bitfield. */
9014 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9016 if (op0 == orig_op0)
9017 op0 = copy_rtx (op0);
9019 MEM_VOLATILE_P (op0) = 1;
9022 /* In cases where an aligned union has an unaligned object
9023 as a field, we might be extracting a BLKmode value from
9024 an integer-mode (e.g., SImode) object. Handle this case
9025 by doing the extract into an object as wide as the field
9026 (which we know to be the width of a basic mode), then
9027 storing into memory, and changing the mode to BLKmode. */
9028 if (mode1 == VOIDmode
9029 || REG_P (op0) || GET_CODE (op0) == SUBREG
9030 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9031 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9032 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9033 && modifier != EXPAND_CONST_ADDRESS
9034 && modifier != EXPAND_INITIALIZER)
9035 /* If the field is volatile, we always want an aligned access. */
9037 || (volatilep && flag_strict_volatile_bitfields > 0)
9038 /* If the field isn't aligned enough to fetch as a memref,
9039 fetch it as a bit field. */
9040 || (mode1 != BLKmode
9041 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9042 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9044 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9045 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9046 && ((modifier == EXPAND_CONST_ADDRESS
9047 || modifier == EXPAND_INITIALIZER)
9049 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9050 || (bitpos % BITS_PER_UNIT != 0)))
9051 /* If the type and the field are a constant size and the
9052 size of the type isn't the same size as the bitfield,
9053 we must use bitfield operations. */
9055 && TYPE_SIZE (TREE_TYPE (exp))
9056 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9057 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9060 enum machine_mode ext_mode = mode;
9062 if (ext_mode == BLKmode
9063 && ! (target != 0 && MEM_P (op0)
9065 && bitpos % BITS_PER_UNIT == 0))
9066 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9068 if (ext_mode == BLKmode)
9071 target = assign_temp (type, 0, 1, 1);
9076 /* In this case, BITPOS must start at a byte boundary and
9077 TARGET, if specified, must be a MEM. */
9078 gcc_assert (MEM_P (op0)
9079 && (!target || MEM_P (target))
9080 && !(bitpos % BITS_PER_UNIT));
9082 emit_block_move (target,
9083 adjust_address (op0, VOIDmode,
9084 bitpos / BITS_PER_UNIT),
9085 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9087 (modifier == EXPAND_STACK_PARM
9088 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9093 op0 = validize_mem (op0);
9095 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9096 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9098 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9099 (modifier == EXPAND_STACK_PARM
9100 ? NULL_RTX : target),
9101 ext_mode, ext_mode);
9103 /* If the result is a record type and BITSIZE is narrower than
9104 the mode of OP0, an integral mode, and this is a big endian
9105 machine, we must put the field into the high-order bits. */
9106 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9107 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9108 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9109 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9110 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9114 /* If the result type is BLKmode, store the data into a temporary
9115 of the appropriate type, but with the mode corresponding to the
9116 mode for the data we have (op0's mode). It's tempting to make
9117 this a constant type, since we know it's only being stored once,
9118 but that can cause problems if we are taking the address of this
9119 COMPONENT_REF because the MEM of any reference via that address
9120 will have flags corresponding to the type, which will not
9121 necessarily be constant. */
9122 if (mode == BLKmode)
9124 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9127 /* If the reference doesn't use the alias set of its type,
9128 we cannot create the temporary using that type. */
9129 if (component_uses_parent_alias_set (exp))
9131 new_rtx = assign_stack_local (ext_mode, size, 0);
9132 set_mem_alias_set (new_rtx, get_alias_set (exp));
9135 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9137 emit_move_insn (new_rtx, op0);
9138 op0 = copy_rtx (new_rtx);
9139 PUT_MODE (op0, BLKmode);
9140 set_mem_attributes (op0, exp, 1);
9146 /* If the result is BLKmode, use that to access the object now as well. */
9148 if (mode == BLKmode)
9151 /* Get a reference to just this component. */
9152 if (modifier == EXPAND_CONST_ADDRESS
9153 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9154 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9156 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9158 if (op0 == orig_op0)
9159 op0 = copy_rtx (op0);
9161 set_mem_attributes (op0, exp, 0);
9162 if (REG_P (XEXP (op0, 0)))
9163 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9165 MEM_VOLATILE_P (op0) |= volatilep;
9166 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9167 || modifier == EXPAND_CONST_ADDRESS
9168 || modifier == EXPAND_INITIALIZER)
9170 else if (target == 0)
9171 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9173 convert_move (target, op0, unsignedp);
9178 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9181 /* All valid uses of __builtin_va_arg_pack () are removed during inlining. */
9183 if (CALL_EXPR_VA_ARG_PACK (exp))
9184 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9186 tree fndecl = get_callee_fndecl (exp), attr;
9189 && (attr = lookup_attribute ("error",
9190 DECL_ATTRIBUTES (fndecl))) != NULL)
9191 error ("%Kcall to %qs declared with attribute error: %s",
9192 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9193 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9195 && (attr = lookup_attribute ("warning",
9196 DECL_ATTRIBUTES (fndecl))) != NULL)
9197 warning_at (tree_nonartificial_location (exp),
9198 0, "%Kcall to %qs declared with attribute warning: %s",
9199 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9200 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9202 /* Check for a built-in function. */
9203 if (fndecl && DECL_BUILT_IN (fndecl))
9205 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9206 return expand_builtin (exp, target, subtarget, tmode, ignore);
9209 return expand_call (exp, target, ignore);
9211 case VIEW_CONVERT_EXPR:
9214 /* If we are converting to BLKmode, try to avoid an intermediate
9215 temporary by fetching an inner memory reference. */
9217 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9218 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9219 && handled_component_p (treeop0))
9221 enum machine_mode mode1;
9222 HOST_WIDE_INT bitsize, bitpos;
9227 = get_inner_reference (treeop0, &bitsize, &bitpos,
9228 &offset, &mode1, &unsignedp, &volatilep,
9232 /* ??? We should work harder and deal with non-zero offsets. */
9234 && (bitpos % BITS_PER_UNIT) == 0
9236 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9238 /* See the normal_inner_ref case for the rationale. */
9241 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9242 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9244 && modifier != EXPAND_STACK_PARM
9245 ? target : NULL_RTX),
9247 (modifier == EXPAND_INITIALIZER
9248 || modifier == EXPAND_CONST_ADDRESS
9249 || modifier == EXPAND_STACK_PARM)
9250 ? modifier : EXPAND_NORMAL);
9252 if (MEM_P (orig_op0))
9256 /* Get a reference to just this component. */
9257 if (modifier == EXPAND_CONST_ADDRESS
9258 || modifier == EXPAND_SUM
9259 || modifier == EXPAND_INITIALIZER)
9260 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9262 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9264 if (op0 == orig_op0)
9265 op0 = copy_rtx (op0);
9267 set_mem_attributes (op0, treeop0, 0);
9268 if (REG_P (XEXP (op0, 0)))
9269 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9271 MEM_VOLATILE_P (op0) |= volatilep;
9277 op0 = expand_expr (treeop0,
9278 NULL_RTX, VOIDmode, modifier);
9280 /* If the input and output modes are both the same, we are done. */
9281 if (mode == GET_MODE (op0))
9283 /* If neither mode is BLKmode, and both modes are the same size
9284 then we can use gen_lowpart. */
9285 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9286 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9287 && !COMPLEX_MODE_P (GET_MODE (op0)))
9289 if (GET_CODE (op0) == SUBREG)
9290 op0 = force_reg (GET_MODE (op0), op0);
9291 op0 = gen_lowpart (mode, op0);
9293 /* If both types are integral, convert from one mode to the other. */
9294 else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
9295 op0 = convert_modes (mode, GET_MODE (op0), op0,
9296 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9297 /* As a last resort, spill op0 to memory, and reload it in a different mode. */
9299 else if (!MEM_P (op0))
9301 /* If the operand is not a MEM, force it into memory. Since we
9302 are going to be changing the mode of the MEM, don't call
9303 force_const_mem for constants because we don't allow pool
9304 constants to change mode. */
9305 tree inner_type = TREE_TYPE (treeop0);
9307 gcc_assert (!TREE_ADDRESSABLE (exp));
9309 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9311 = assign_stack_temp_for_type
9312 (TYPE_MODE (inner_type),
9313 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9315 emit_move_insn (target, op0);
9319 /* At this point, OP0 is in the correct mode. If the output type is
9320 such that the operand is known to be aligned, indicate that it is.
9321 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
9325 op0 = copy_rtx (op0);
9327 if (TYPE_ALIGN_OK (type))
9328 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9329 else if (STRICT_ALIGNMENT
9331 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9333 tree inner_type = TREE_TYPE (treeop0);
9334 HOST_WIDE_INT temp_size
9335 = MAX (int_size_in_bytes (inner_type),
9336 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9338 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9339 rtx new_with_op0_mode
9340 = adjust_address (new_rtx, GET_MODE (op0), 0);
9342 gcc_assert (!TREE_ADDRESSABLE (exp));
9344 if (GET_MODE (op0) == BLKmode)
9345 emit_block_move (new_with_op0_mode, op0,
9346 GEN_INT (GET_MODE_SIZE (mode)),
9347 (modifier == EXPAND_STACK_PARM
9348 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9350 emit_move_insn (new_with_op0_mode, op0);
9355 op0 = adjust_address (op0, mode, 0);
9360 /* Use a compare and a jump for BLKmode comparisons, or for function
9361 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9363 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9364 are occasionally created by folding during expansion. */
9365 case TRUTH_ANDIF_EXPR:
9366 case TRUTH_ORIF_EXPR:
9369 || modifier == EXPAND_STACK_PARM
9370 || ! safe_from_p (target, treeop0, 1)
9371 || ! safe_from_p (target, treeop1, 1)
9372 /* Make sure we don't have a hard reg (such as function's return
9373 value) live across basic blocks, if not optimizing. */
9374 || (!optimize && REG_P (target)
9375 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9376 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9379 emit_move_insn (target, const0_rtx);
9381 op1 = gen_label_rtx ();
9382 jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9385 emit_move_insn (target, const1_rtx);
9388 return ignore ? const0_rtx : target;
9390 case STATEMENT_LIST:
9392 tree_stmt_iterator iter;
9394 gcc_assert (ignore);
9396 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9397 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9402 /* A COND_EXPR with its type being VOID_TYPE represents a
9403 conditional jump and is handled in
9404 expand_gimple_cond_expr. */
9405 gcc_assert (!VOID_TYPE_P (type));
9407 /* Note that COND_EXPRs whose type is a structure or union
9408 are required to be constructed to contain assignments of
9409 a temporary variable, so that we can evaluate them here
9410 for side effect only. If type is void, we must do likewise. */
9412 gcc_assert (!TREE_ADDRESSABLE (type)
9414 && TREE_TYPE (treeop1) != void_type_node
9415 && TREE_TYPE (treeop2) != void_type_node);
9417 /* If we are not to produce a result, we have no target. Otherwise,
9418 if a target was specified use it; it will not be used as an
9419 intermediate target unless it is safe. If no target, use a temporary. */
9422 if (modifier != EXPAND_STACK_PARM
9424 && safe_from_p (original_target, treeop0, 1)
9425 && GET_MODE (original_target) == mode
9426 #ifdef HAVE_conditional_move
9427 && (! can_conditionally_move_p (mode)
9428 || REG_P (original_target))
9430 && !MEM_P (original_target))
9431 temp = original_target;
9433 temp = assign_temp (type, 0, 0, 1);
9435 do_pending_stack_adjust ();
9437 op0 = gen_label_rtx ();
9438 op1 = gen_label_rtx ();
9439 jumpifnot (treeop0, op0, -1);
9440 store_expr (treeop1, temp,
9441 modifier == EXPAND_STACK_PARM,
9444 emit_jump_insn (gen_jump (op1));
9447 store_expr (treeop2, temp,
9448 modifier == EXPAND_STACK_PARM,
9456 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9463 gcc_assert (ignore);
9465 /* Check for |= or &= of a bitfield of size one into another bitfield
9466 of size 1. In this case, (unless we need the result of the
9467 assignment) we can do this more efficiently with a
9468 test followed by an assignment, if necessary.
9470 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9471 things change so we do, this code should be enhanced to support it. */
9473 if (TREE_CODE (lhs) == COMPONENT_REF
9474 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9475 || TREE_CODE (rhs) == BIT_AND_EXPR)
9476 && TREE_OPERAND (rhs, 0) == lhs
9477 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9478 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9479 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9481 rtx label = gen_label_rtx ();
9482 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9483 do_jump (TREE_OPERAND (rhs, 1),
9485 value ? 0 : label, -1);
9486 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9487 MOVE_NONTEMPORAL (exp));
9488 do_pending_stack_adjust ();
9493 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9498 return expand_expr_addr_expr (exp, target, tmode, modifier);
9501 op0 = expand_normal (treeop0);
9502 return read_complex_part (op0, false);
9505 op0 = expand_normal (treeop0);
9506 return read_complex_part (op0, true);
9513 /* Expanded in cfgexpand.c. */
9516 case TRY_CATCH_EXPR:
9518 case EH_FILTER_EXPR:
9519 case TRY_FINALLY_EXPR:
9520 /* Lowered by tree-eh.c. */
9523 case WITH_CLEANUP_EXPR:
9524 case CLEANUP_POINT_EXPR:
9526 case CASE_LABEL_EXPR:
9532 case PREINCREMENT_EXPR:
9533 case PREDECREMENT_EXPR:
9534 case POSTINCREMENT_EXPR:
9535 case POSTDECREMENT_EXPR:
9538 /* Lowered by gimplify.c. */
9542 /* Function descriptors are not valid except as
9543 initialization constants, and should not be expanded. */
9546 case WITH_SIZE_EXPR:
9547 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9548 have pulled out the size to use in whatever context it needed. */
9549 return expand_expr_real (treeop0, original_target, tmode,
9552 case REALIGN_LOAD_EXPR:
9554 tree oprnd0 = treeop0;
9555 tree oprnd1 = treeop1;
9556 tree oprnd2 = treeop2;
9559 this_optab = optab_for_tree_code (code, type, optab_default);
9560 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9561 op2 = expand_normal (oprnd2);
9562 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9570 tree oprnd0 = treeop0;
9571 tree oprnd1 = treeop1;
9572 tree oprnd2 = treeop2;
9575 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9576 op2 = expand_normal (oprnd2);
9577 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9582 case COMPOUND_LITERAL_EXPR:
9584 /* Initialize the anonymous variable declared in the compound
9585 literal, then return the variable. */
9586 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9588 /* Create RTL for this variable. */
9589 if (!DECL_RTL_SET_P (decl))
9591 if (DECL_HARD_REGISTER (decl))
9592 /* The user specified an assembler name for this variable. */
9594 rest_of_decl_compilation (decl, 0, 0);
9599 return expand_expr_real (decl, original_target, tmode,
9604 return expand_expr_real_2 (&ops, target, tmode, modifier);
9608 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9609 signedness of TYPE), possibly returning the result in TARGET. */
9611 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9613 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9614 if (target && GET_MODE (target) != GET_MODE (exp))
9616 /* For constant values, reduce using build_int_cst_type. */
9617 if (CONST_INT_P (exp))
9619 HOST_WIDE_INT value = INTVAL (exp);
9620 tree t = build_int_cst_type (type, value);
9621 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9623 else if (TYPE_UNSIGNED (type))
9625 rtx mask = immed_double_int_const (double_int_mask (prec),
9627 return expand_and (GET_MODE (exp), exp, mask, target);
9631 tree count = build_int_cst (NULL_TREE,
9632 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9633 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9634 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
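#if 0
/* Editorial sketch (not part of GCC): a host-side analogue of the two
   branches above for a value carried in a 32-bit word, assuming
   arithmetic right shifts; the example_reduce name is hypothetical.
   Reducing 0x2D to 3 signed bits yields -3; to 3 unsigned bits, 5.  */
#include <stdint.h>
static int32_t
example_reduce (int32_t val, int prec, int is_unsigned)
{
  if (is_unsigned)
    return val & (int32_t) (((uint32_t) 1 << prec) - 1); /* mask branch */
  val = (int32_t) ((uint32_t) val << (32 - prec));       /* LSHIFT_EXPR step */
  return val >> (32 - prec);                             /* RSHIFT_EXPR step */
}
#endif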
9638 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9639 when applied to the address of EXP produces an address known to be
9640 aligned more than BIGGEST_ALIGNMENT. */
9643 is_aligning_offset (const_tree offset, const_tree exp)
9645 /* Strip off any conversions. */
9646 while (CONVERT_EXPR_P (offset))
9647 offset = TREE_OPERAND (offset, 0);
9649 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9650 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9651 if (TREE_CODE (offset) != BIT_AND_EXPR
9652 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9653 || compare_tree_int (TREE_OPERAND (offset, 1),
9654 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9655 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9658 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9659 It must be NEGATE_EXPR. Then strip any more conversions. */
9660 offset = TREE_OPERAND (offset, 0);
9661 while (CONVERT_EXPR_P (offset))
9662 offset = TREE_OPERAND (offset, 0);
9664 if (TREE_CODE (offset) != NEGATE_EXPR)
9667 offset = TREE_OPERAND (offset, 0);
9668 while (CONVERT_EXPR_P (offset))
9669 offset = TREE_OPERAND (offset, 0);
9671 /* This must now be the address of EXP. */
9672 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
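/* Editorial illustration (not from the original source): the shape
   recognized above is the aligning adjustment generated for offsets
   of the form

     (sizetype) (- (char *) &exp) & MASK

   where MASK is one less than a power of two and exceeds the
   BIGGEST_ALIGNMENT bound tested above; adding such an offset to
   &exp rounds the address up to that larger alignment.  */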
9675 /* Return the tree node if ARG corresponds to a string constant or zero
9676 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9677 in bytes within the string that ARG is accessing. The type of the
9678 offset will be `sizetype'. */
9681 string_constant (tree arg, tree *ptr_offset)
9683 tree array, offset, lower_bound;
9686 if (TREE_CODE (arg) == ADDR_EXPR)
9688 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9690 *ptr_offset = size_zero_node;
9691 return TREE_OPERAND (arg, 0);
9693 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9695 array = TREE_OPERAND (arg, 0);
9696 offset = size_zero_node;
9698 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9700 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9701 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9702 if (TREE_CODE (array) != STRING_CST
9703 && TREE_CODE (array) != VAR_DECL)
9706 /* Check if the array has a nonzero lower bound. */
9707 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9708 if (!integer_zerop (lower_bound))
9710 /* If the offset and base aren't both constants, return 0. */
9711 if (TREE_CODE (lower_bound) != INTEGER_CST)
9713 if (TREE_CODE (offset) != INTEGER_CST)
9715 /* Adjust offset by the lower bound. */
9716 offset = size_diffop (fold_convert (sizetype, offset),
9717 fold_convert (sizetype, lower_bound));
9723 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9725 tree arg0 = TREE_OPERAND (arg, 0);
9726 tree arg1 = TREE_OPERAND (arg, 1);
9731 if (TREE_CODE (arg0) == ADDR_EXPR
9732 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9733 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9735 array = TREE_OPERAND (arg0, 0);
9738 else if (TREE_CODE (arg1) == ADDR_EXPR
9739 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9740 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9742 array = TREE_OPERAND (arg1, 0);
9751 if (TREE_CODE (array) == STRING_CST)
9753 *ptr_offset = fold_convert (sizetype, offset);
9756 else if (TREE_CODE (array) == VAR_DECL)
9760 /* Variables initialized to string literals can be handled too. */
9761 if (DECL_INITIAL (array) == NULL_TREE
9762 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9765 /* They must be read-only, non-volatile, and bind locally. */
9766 if (! TREE_READONLY (array)
9767 || TREE_SIDE_EFFECTS (array)
9768 || ! targetm.binds_local_p (array))
9771 /* Avoid const char foo[4] = "abcde"; */
9772 if (DECL_SIZE_UNIT (array) == NULL_TREE
9773 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9774 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9775 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9778 /* If the variable is bigger than the string literal, OFFSET must be
9779 constant and within the bounds of the string literal. */
9780 offset = fold_convert (sizetype, offset);
9781 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9782 && (! host_integerp (offset, 1)
9783 || compare_tree_int (offset, length) >= 0))
9786 *ptr_offset = offset;
9787 return DECL_INITIAL (array);
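/* Editorial illustration (not from the original source): for an
   argument like &"hello"[2] (equivalently "hello" + 2),
   string_constant returns the STRING_CST "hello" and sets
   *PTR_OFFSET to the sizetype constant 2; builtin expanders such as
   the strlen one use this to fold the call at compile time.  */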
9793 /* Generate code to calculate OPS, an exploded expression,
9794 using a store-flag instruction and return an rtx for the result.
9795 OPS reflects a comparison.
9797 If TARGET is nonzero, store the result there if convenient.
9799 Return zero if there is no suitable set-flag instruction
9800 available on this machine.
9802 Once expand_expr has been called on the arguments of the comparison,
9803 we are committed to doing the store flag, since it is not safe to
9804 re-evaluate the expression. We emit the store-flag insn by calling
9805 emit_store_flag, but only expand the arguments if we have a reason
9806 to believe that emit_store_flag will be successful. If we think that
9807 it will, but it isn't, we have to simulate the store-flag with a
9808 set/jump/set sequence. */
9811 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9814 tree arg0, arg1, type;
9816 enum machine_mode operand_mode;
9819 rtx subtarget = target;
9820 location_t loc = ops->location;
9825 /* Don't crash if the comparison was erroneous. */
9826 if (arg0 == error_mark_node || arg1 == error_mark_node)
9829 type = TREE_TYPE (arg0);
9830 operand_mode = TYPE_MODE (type);
9831 unsignedp = TYPE_UNSIGNED (type);
9833 /* We won't bother with BLKmode store-flag operations because it would mean
9834 passing a lot of information to emit_store_flag. */
9835 if (operand_mode == BLKmode)
9838 /* We won't bother with store-flag operations involving function pointers
9839 when function pointers must be canonicalized before comparisons. */
9840 #ifdef HAVE_canonicalize_funcptr_for_compare
9841 if (HAVE_canonicalize_funcptr_for_compare
9842 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9843 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9845 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9846 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9847 == FUNCTION_TYPE))))
9854 /* Get the rtx comparison code to use. We know that EXP is a comparison
9855 operation of some type. Some comparisons against 1 and -1 can be
9856 converted to comparisons with zero. Do so here so that the tests
9857 below will be aware that we have a comparison with zero. These
9858 tests will not catch constants in the first operand, but constants
9859 are rarely passed as the first operand. */
9870 if (integer_onep (arg1))
9871 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9873 code = unsignedp ? LTU : LT;
9876 if (! unsignedp && integer_all_onesp (arg1))
9877 arg1 = integer_zero_node, code = LT;
9879 code = unsignedp ? LEU : LE;
9882 if (! unsignedp && integer_all_onesp (arg1))
9883 arg1 = integer_zero_node, code = GE;
9885 code = unsignedp ? GTU : GT;
9888 if (integer_onep (arg1))
9889 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9891 code = unsignedp ? GEU : GE;
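/* Editorial illustration (not from the original source): the
   conversions above rewrite, e.g., signed "x < 1" as "x <= 0",
   "x <= -1" as "x < 0", "x > -1" as "x >= 0" and "x >= 1" as
   "x > 0", so these comparisons reach emit_store_flag in their
   zero-comparison form.  */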
9894 case UNORDERED_EXPR:
9923 /* Put a constant second. */
9924 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9925 || TREE_CODE (arg0) == FIXED_CST)
9927 tem = arg0; arg0 = arg1; arg1 = tem;
9928 code = swap_condition (code);
9931 /* If this is an equality or inequality test of a single bit, we can
9932 do this by shifting the bit being tested to the low-order bit and
9933 masking the result with the constant 1. If the condition was EQ,
9934 we xor it with 1. This does not require an scc insn and is faster
9935 than an scc insn even if we have it.
9937 The code to make this transformation was moved into fold_single_bit_test,
9938 so we just call into the folder and expand its result. */
9940 if ((code == NE || code == EQ)
9941 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9942 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9944 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9945 return expand_expr (fold_single_bit_test (loc,
9946 code == NE ? NE_EXPR : EQ_EXPR,
9948 target, VOIDmode, EXPAND_NORMAL);
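/* Editorial illustration (not from the original source): for
   "(x & 8) != 0" the folder produces "(x >> 3) & 1", and for the EQ
   form it additionally XORs that result with 1, so no store-flag
   (scc) instruction is needed.  */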
9951 if (! get_subtarget (target)
9952 || GET_MODE (subtarget) != operand_mode)
9955 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
9958 target = gen_reg_rtx (mode);
9960 /* Try a cstore if possible. */
9961 return emit_store_flag_force (target, code, op0, op1,
9962 operand_mode, unsignedp, 1);
9966 /* Stubs in case we haven't got a casesi insn. */
9968 # define HAVE_casesi 0
9969 # define gen_casesi(a, b, c, d, e) (0)
9970 # define CODE_FOR_casesi CODE_FOR_nothing
9973 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9974 0 otherwise (i.e. if there is no casesi instruction). */
9976 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9977 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
9978 rtx fallback_label ATTRIBUTE_UNUSED)
9980 enum machine_mode index_mode = SImode;
9981 int index_bits = GET_MODE_BITSIZE (index_mode);
9982 rtx op1, op2, index;
9983 enum machine_mode op_mode;
9988 /* Convert the index to SImode. */
9989 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9991 enum machine_mode omode = TYPE_MODE (index_type);
9992 rtx rangertx = expand_normal (range);
9994 /* We must handle the endpoints in the original mode. */
9995 index_expr = build2 (MINUS_EXPR, index_type,
9996 index_expr, minval);
9997 minval = integer_zero_node;
9998 index = expand_normal (index_expr);
10000 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10001 omode, 1, default_label);
10002 /* Now we can safely truncate. */
10003 index = convert_to_mode (index_mode, index, 0);
10007 if (TYPE_MODE (index_type) != index_mode)
10009 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10010 index_expr = fold_convert (index_type, index_expr);
10013 index = expand_normal (index_expr);
10016 do_pending_stack_adjust ();
10018 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10019 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10021 index = copy_to_mode_reg (op_mode, index);
10023 op1 = expand_normal (minval);
10025 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10026 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10027 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10028 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10030 op1 = copy_to_mode_reg (op_mode, op1);
10032 op2 = expand_normal (range);
10034 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10035 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10036 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10037 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10039 op2 = copy_to_mode_reg (op_mode, op2);
10041 emit_jump_insn (gen_casesi (index, op1, op2,
10042 table_label, !default_label
10043 ? fallback_label : default_label));
10047 /* Attempt to generate a tablejump instruction; same concept. */
10048 #ifndef HAVE_tablejump
10049 #define HAVE_tablejump 0
10050 #define gen_tablejump(x, y) (0)
10053 /* Subroutine of the next function.
10055 INDEX is the value being switched on, with the lowest value
10056 in the table already subtracted.
10057 MODE is its expected mode (needed if INDEX is constant).
10058 RANGE is the length of the jump table.
10059 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10061 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10062 index value is out of range. */
10065 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10070 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10071 cfun->cfg->max_jumptable_ents = INTVAL (range);
10073 /* Do an unsigned comparison (in the proper mode) between the index
10074 expression and the value which represents the length of the range.
10075 Since we just finished subtracting the lower bound of the range
10076 from the index expression, this comparison allows us to simultaneously
10077 check that the original index expression value is both greater than
10078 or equal to the minimum value of the range and less than or equal to
10079 the maximum value of the range. */
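/* Editorial illustration (not from the original source): for a switch
   whose cases span [5, 12], INDEX arrives here already biased to
   i - 5 and RANGE is 7, so the single unsigned test

     (unsigned) (i - 5) > 7

   rejects both i < 5 (the subtraction wraps to a huge value) and
   i > 12 in one comparison.  */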
10082 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10085 /* If index is in range, it must fit in Pmode.
10086 Convert to Pmode so we can index with it. */
10088 index = convert_to_mode (Pmode, index, 1);
10090 /* Don't let a MEM slip through, because then INDEX that comes
10091 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10092 and break_out_memory_refs will go to work on it and mess it up. */
10093 #ifdef PIC_CASE_VECTOR_ADDRESS
10094 if (flag_pic && !REG_P (index))
10095 index = copy_to_mode_reg (Pmode, index);
10098 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10099 GET_MODE_SIZE, because this indicates how large insns are. The other
10100 uses should all be Pmode, because they are addresses. This code
10101 could fail if addresses and insns are not the same size. */
10102 index = gen_rtx_PLUS (Pmode,
10103 gen_rtx_MULT (Pmode, index,
10104 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10105 gen_rtx_LABEL_REF (Pmode, table_label));
10106 #ifdef PIC_CASE_VECTOR_ADDRESS
10108 index = PIC_CASE_VECTOR_ADDRESS (index);
10111 index = memory_address (CASE_VECTOR_MODE, index);
10112 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10113 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10114 convert_move (temp, vector, 0);
10116 emit_jump_insn (gen_tablejump (temp, table_label));
10118 /* If we are generating PIC code or if the table is PC-relative, the
10119 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10120 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10125 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10126 rtx table_label, rtx default_label)
10130 if (! HAVE_tablejump)
10133 index_expr = fold_build2 (MINUS_EXPR, index_type,
10134 fold_convert (index_type, index_expr),
10135 fold_convert (index_type, minval));
10136 index = expand_normal (index_expr);
10137 do_pending_stack_adjust ();
10139 do_tablejump (index, TYPE_MODE (index_type),
10140 convert_modes (TYPE_MODE (index_type),
10141 TYPE_MODE (TREE_TYPE (range)),
10142 expand_normal (range),
10143 TYPE_UNSIGNED (TREE_TYPE (range))),
10144 table_label, default_label);
10148 /* Nonzero if the mode is a valid vector mode for this architecture.
10149 This returns nonzero even if there is no hardware support for the
10150 vector mode, but we can emulate with narrower modes. */
10153 vector_mode_valid_p (enum machine_mode mode)
10155 enum mode_class mclass = GET_MODE_CLASS (mode);
10156 enum machine_mode innermode;
10158 /* Doh! What's going on? */
10159 if (mclass != MODE_VECTOR_INT
10160 && mclass != MODE_VECTOR_FLOAT
10161 && mclass != MODE_VECTOR_FRACT
10162 && mclass != MODE_VECTOR_UFRACT
10163 && mclass != MODE_VECTOR_ACCUM
10164 && mclass != MODE_VECTOR_UACCUM)
10167 /* Hardware support. Woo hoo! */
10168 if (targetm.vector_mode_supported_p (mode))
10171 innermode = GET_MODE_INNER (mode);
10173 /* We should probably return 1 if requesting V4DI and we have no DI,
10174 but do have V2DI; however, that case is probably very unlikely. */
10176 /* If we have support for the inner mode, we can safely emulate it.
10177 We may not have V2DI, but we can emulate with a pair of DIs. */
10178 return targetm.scalar_mode_supported_p (innermode);
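/* Editorial illustration (not from the original source): on a 32-bit
   target without a vector unit, V2SImode is still reported valid here
   because SImode is supported; generic vector lowering then splits
   each V2SI operation into two SImode operations.  */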
10181 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10183 const_vector_from_tree (tree exp)
10188 enum machine_mode inner, mode;
10190 mode = TYPE_MODE (TREE_TYPE (exp));
10192 if (initializer_zerop (exp))
10193 return CONST0_RTX (mode);
10195 units = GET_MODE_NUNITS (mode);
10196 inner = GET_MODE_INNER (mode);
10198 v = rtvec_alloc (units);
10200 link = TREE_VECTOR_CST_ELTS (exp);
10201 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10203 elt = TREE_VALUE (link);
10205 if (TREE_CODE (elt) == REAL_CST)
10206 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10208 else if (TREE_CODE (elt) == FIXED_CST)
10209 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10212 RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
10216 /* Initialize remaining elements to 0. */
10217 for (; i < units; ++i)
10218 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10220 return gen_rtx_CONST_VECTOR (mode, v);
10224 /* Build a decl for an EH personality function named NAME. */
10227 build_personality_function (const char *name)
10231 type = build_function_type_list (integer_type_node, integer_type_node,
10232 long_long_unsigned_type_node,
10233 ptr_type_node, ptr_type_node, NULL_TREE);
10234 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10235 get_identifier (name), type);
10236 DECL_ARTIFICIAL (decl) = 1;
10237 DECL_EXTERNAL (decl) = 1;
10238 TREE_PUBLIC (decl) = 1;
10240 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10241 are the flags assigned by targetm.encode_section_info. */
10242 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10247 /* Extracts the personality function of DECL and returns the corresponding libfunc. */
10251 get_personality_function (tree decl)
10253 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10254 enum eh_personality_kind pk;
10256 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10257 if (pk == eh_personality_none)
10261 && pk == eh_personality_any)
10262 personality = lang_hooks.eh_personality ();
10264 if (pk == eh_personality_lang)
10265 gcc_assert (personality != NULL_TREE);
10267 return XEXP (DECL_RTL (personality), 0);
10270 #include "gt-expr.h"