/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"
#include "ssaexpand.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces_d
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces_d *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces_d *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (sepops, rtx, enum machine_mode);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
#endif
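
/* Illustrative sketch (editorial addition, not part of the original
   source): how a caller might consult one of these predicates before
   choosing a block-copy strategy.  The function and its arguments are
   hypothetical; compare emit_block_move_hints below for the real
   decision code.  Guarded with #if 0 so it is never compiled.  */
#if 0
static void
example_copy_strategy (rtx dst, rtx src, unsigned HOST_WIDE_INT nbytes,
		       unsigned int align)
{
  if (MOVE_BY_PIECES_P (nbytes, align))
    /* Cheap enough: expand into a short sequence of scalar moves.  */
    move_by_pieces (dst, src, nbytes, align, 0);
  else
    /* Too many insns; fall back to a movmem pattern or a libcall.  */
    emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);
}
#endif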
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }

  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
				      LT, lowfrom, const0_rtx,
				      VOIDmode, 0, -1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
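
/* Illustrative sketch (editorial addition, not part of the original
   source): a typical use of convert_move to widen a value.  The local
   names are hypothetical.  Guarded with #if 0 so it is never
   compiled.  */
#if 0
static void
example_widen (void)
{
  rtx narrow = gen_reg_rtx (HImode);
  rtx wide = gen_reg_rtx (SImode);

  /* Emit whatever insns are needed to copy NARROW into WIDE,
     zero-extending because UNSIGNEDP is nonzero.  */
  convert_move (wide, narrow, 1);
}
#endif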
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
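
/* Illustrative sketch (editorial addition, not part of the original
   source): unlike convert_move, convert_to_mode returns an rtx and may
   simply refer to a part of X in place.  The name and argument are
   hypothetical.  Guarded with #if 0 so it is never compiled.  */
#if 0
static rtx
example_narrow (rtx x_simode)
{
  /* Produce a QImode view of an SImode value; UNSIGNEDP is irrelevant
     for a pure truncation.  */
  return convert_to_mode (QImode, x_simode, 0);
}
#endif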
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && CONST_INT_P (x) && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((CONST_INT_P (x)
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (CONST_INT_P (x) && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
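
/* Worked example (editorial addition, not part of the original
   source): converting the QImode constant 0xff to SImode with
   UNSIGNEDP clear.  WIDTH is 8, so VAL &= (1 << 8) - 1 leaves 0xff,
   the sign bit (1 << 7) is set, and VAL |= -1 << 8 yields -1, the
   sign-extended SImode constant.  With UNSIGNEDP nonzero the result
   would stay 255.  */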
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces_d data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
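
/* Worked example (editorial addition, not part of the original
   source): copying LEN == 5 bytes with ENDP == 1 returns a QImode MEM
   addressing TO + 5, the byte just past the copy (mempcpy semantics);
   with ENDP == 2 it returns TO + 4, the last byte written (stpcpy
   semantics); with ENDP == 0 the original TO is returned.  */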
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
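
/* Worked example (editorial addition, not part of the original
   source): on a 32-bit target with MOVE_MAX_PIECES == 4 and full mov
   optab support, L == 15 and ALIGN == 32 yield 3 SImode moves
   (12 bytes), 1 HImode move (2 bytes) and 1 QImode move, so
   move_by_pieces_ninsns returns 5.  */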
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces_d *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (CONST_INT_P (size))
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
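
/* Illustrative sketch (editorial addition, not part of the original
   source): how expansion code elsewhere typically copies an aggregate.
   The MEMs and the 64-byte size are hypothetical.  Guarded with #if 0
   so it is never compiled.  */
#if 0
static void
example_struct_copy (rtx dst_mem, rtx src_mem)
{
  /* Copy 64 bytes; the helper picks move_by_pieces, a movmem pattern,
     a memcpy libcall, or an explicit loop, in that order.  */
  emit_block_move (dst_mem, src_mem, GEN_INT (64), BLOCK_OP_NORMAL);
}
#endif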
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((CONST_INT_P (size)
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align
						 / BITS_PER_UNIT),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
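
/* The loop emitted above is equivalent to this C sketch (editorial
   addition, not part of the original source):

     size_t iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter += 1;
   cmp:
     if (iter < size)
       goto top;

   i.e. a byte-at-a-time forward copy, guarded so that a zero SIZE
   copies nothing.  */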
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
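
/* Illustrative sketch (editorial addition, not part of the original
   source): a two-register group for a 16-byte value split across
   DImode registers looks like

     (parallel [(expr_list (reg:DI 100) (const_int 0))
		(expr_list (reg:DI 101) (const_int 8))])

   where each EXPR_LIST pairs a register with its byte offset into the
   value; gen_group_rtx above replaces each register with a fresh
   pseudo of the same mode.  */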
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
1890 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1891 where SRC is non-consecutive registers represented by a PARALLEL.
1892 SSIZE represents the total size of block ORIG_DST, or -1 if not
1896 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1899 int start, finish, i;
1900 enum machine_mode m = GET_MODE (orig_dst);
1902 gcc_assert (GET_CODE (src) == PARALLEL);
1904 if (!SCALAR_INT_MODE_P (m)
1905 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1907 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1908 if (imode == BLKmode)
1909 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1911 dst = gen_reg_rtx (imode);
1912 emit_group_store (dst, src, type, ssize);
1913 if (imode != BLKmode)
1914 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1915 emit_move_insn (orig_dst, dst);
1919 /* Check for a NULL entry, used to indicate that the parameter goes
1920 both on the stack and in registers. */
1921 if (XEXP (XVECEXP (src, 0, 0), 0))
1925 finish = XVECLEN (src, 0);
1927 tmps = XALLOCAVEC (rtx, finish);
1929 /* Copy the (probable) hard regs into pseudos. */
1930 for (i = start; i < finish; i++)
1932 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1933 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1935 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1936 emit_move_insn (tmps[i], reg);
1942 /* If we won't be storing directly into memory, protect the real destination
1943 from strange tricks we might play. */
1945 if (GET_CODE (dst) == PARALLEL)
1949 /* We can get a PARALLEL dst if there is a conditional expression in
1950 a return statement. In that case, the dst and src are the same,
1951 so no action is necessary. */
1952 if (rtx_equal_p (dst, src))
1955 /* It is unclear if we can ever reach here, but we may as well handle
1956 it. Allocate a temporary, and split this into a store/load to/from
1959 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1960 emit_group_store (temp, src, type, ssize);
1961 emit_group_load (dst, temp, type, ssize);
1964 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1966 enum machine_mode outer = GET_MODE (dst);
1967 enum machine_mode inner;
1968 HOST_WIDE_INT bytepos;
1972 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1973 dst = gen_reg_rtx (outer);
1975 /* Make life a bit easier for combine. */
1976 /* If the first element of the vector is the low part
1977 of the destination mode, use a paradoxical subreg to
1978 initialize the destination. */
1981 inner = GET_MODE (tmps[start]);
1982 bytepos = subreg_lowpart_offset (inner, outer);
1983 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1985 temp = simplify_gen_subreg (outer, tmps[start],
1989 emit_move_insn (dst, temp);
1996 /* If the first element wasn't the low part, try the last. */
1998 && start < finish - 1)
2000 inner = GET_MODE (tmps[finish - 1]);
2001 bytepos = subreg_lowpart_offset (inner, outer);
2002 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2004 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2008 emit_move_insn (dst, temp);
2015 /* Otherwise, simply initialize the result to zero. */
2017 emit_move_insn (dst, CONST0_RTX (outer));
2020 /* Process the pieces. */
2021 for (i = start; i < finish; i++)
2023 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2024 enum machine_mode mode = GET_MODE (tmps[i]);
2025 unsigned int bytelen = GET_MODE_SIZE (mode);
2026 unsigned int adj_bytelen = bytelen;
2029 /* Handle trailing fragments that run over the size of the struct. */
2030 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2031 adj_bytelen = ssize - bytepos;
2033 if (GET_CODE (dst) == CONCAT)
2035 if (bytepos + adj_bytelen
2036 <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2037 dest = XEXP (dst, 0);
2038 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2040 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2041 dest = XEXP (dst, 1);
2045 enum machine_mode dest_mode = GET_MODE (dest);
2046 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2048 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2050 if (GET_MODE_ALIGNMENT (dest_mode)
2051 >= GET_MODE_ALIGNMENT (tmp_mode))
2053 dest = assign_stack_temp (dest_mode,
2054 GET_MODE_SIZE (dest_mode),
2056 emit_move_insn (adjust_address (dest,
2064 dest = assign_stack_temp (tmp_mode,
2065 GET_MODE_SIZE (tmp_mode),
2067 emit_move_insn (dest, tmps[i]);
2068 dst = adjust_address (dest, dest_mode, bytepos);
2074 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2076 /* store_bit_field always takes its value from the lsb.
2077 Move the fragment to the lsb if it's not already there. */
2079 #ifdef BLOCK_REG_PADDING
2080 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2081 == (BYTES_BIG_ENDIAN ? upward : downward)
2087 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2088 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2089 build_int_cst (NULL_TREE, shift),
2092 bytelen = adj_bytelen;
/* Optimize the access just a bit.  */
if (MEM_P (dest)
    && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	|| MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
    && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
    && bytelen == GET_MODE_SIZE (mode))
  emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
else
  store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
		   mode, tmps[i]);
2107 /* Copy from the pseudo into the (probable) hard reg. */
2108 if (orig_dst != dst)
2109 emit_move_insn (orig_dst, dst);
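/* Example (editor's sketch, not from the original sources): on a
   hypothetical 64-bit target, a 12-byte structure arriving in two
   registers would be described by a SRC of the form

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DI 1) (const_int 8))])

   where each second operand gives the byte offset of that piece
   within the structure; the loop above stores each piece at its
   offset in DST.  */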
2112 /* Generate code to copy a BLKmode object of TYPE out of a
2113 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2114 is null, a stack temporary is created. TGTBLK is returned.
2116 The purpose of this routine is to handle functions that return
2117 BLKmode structures in registers. Some machines (the PA for example)
2118 want to return all small structures in registers regardless of the
2119 structure's alignment. */
2122 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2124 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2125 rtx src = NULL, dst = NULL;
2126 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2127 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2128 enum machine_mode copy_mode;
tgtblk = assign_temp (build_qualified_type (type,
					    (TYPE_QUALS (type)
					     | TYPE_QUAL_CONST)),
		      0, 1, 1);
2136 preserve_temp_slots (tgtblk);
2139 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2140 into a new pseudo which is a full word. */
2142 if (GET_MODE (srcreg) != BLKmode
2143 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2144 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2146 /* If the structure doesn't take up a whole number of words, see whether
2147 SRCREG is padded on the left or on the right. If it's on the left,
2148 set PADDING_CORRECTION to the number of bits to skip.
In most ABIs, the structure will be returned at the least significant end of
2151 the register, which translates to right padding on little-endian
2152 targets and left padding on big-endian targets. The opposite
2153 holds if the structure is returned at the most significant
2154 end of the register. */
if (bytes % UNITS_PER_WORD != 0
    && (targetm.calls.return_in_msb (type)
	? !BYTES_BIG_ENDIAN
	: BYTES_BIG_ENDIAN))
  padding_correction
    = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2162 /* Copy the structure BITSIZE bits at a time. If the target lives in
2163 memory, take care of not reading/writing past its end by selecting
a copy mode suited to BITSIZE.  This should always be possible given
that word_mode is always usable as a fallback.

We could probably emit more efficient code for machines which do not use
strict alignment, but it doesn't seem worth the effort at the current
time.  */
2171 copy_mode = word_mode;
2174 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2175 if (mem_mode != BLKmode)
2176 copy_mode = mem_mode;
2179 for (bitpos = 0, xbitpos = padding_correction;
2180 bitpos < bytes * BITS_PER_UNIT;
2181 bitpos += bitsize, xbitpos += bitsize)
2183 /* We need a new source operand each time xbitpos is on a
2184 word boundary and when xbitpos == padding_correction
2185 (the first time through). */
2186 if (xbitpos % BITS_PER_WORD == 0
2187 || xbitpos == padding_correction)
2188 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2191 /* We need a new destination operand each time bitpos is on
2193 if (bitpos % BITS_PER_WORD == 0)
2194 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2196 /* Use xbitpos for the source extraction (right justified) and
2197 bitpos for the destination store (left justified). */
2198 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2199 extract_bit_field (src, bitsize,
2200 xbitpos % BITS_PER_WORD, 1,
2201 NULL_RTX, copy_mode, copy_mode));
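/* Example (editor's sketch): a caller expanding a call to a function
   that returns a small BLKmode struct in a register might do

     rtx ret = copy_blkmode_from_reg (NULL_RTX, valreg, rettype);

   where VALREG and RETTYPE are hypothetical; passing a null TGTBLK
   makes the routine allocate and return a fresh stack temporary.  */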
2207 /* Add a USE expression for REG to the (possibly empty) list pointed
2208 to by CALL_FUSAGE. REG must denote a hard register. */
2211 use_reg (rtx *call_fusage, rtx reg)
2213 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
*call_fusage
  = gen_rtx_EXPR_LIST (VOIDmode,
2217 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2220 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2221 starting at REGNO. All of these registers must be hard registers. */
2224 use_regs (rtx *call_fusage, int regno, int nregs)
2228 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2230 for (i = 0; i < nregs; i++)
2231 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2234 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2235 PARALLEL REGS. This is for calls that pass values in multiple
2236 non-contiguous locations. The Irix 6 ABI has examples of this. */
2239 use_group_regs (rtx *call_fusage, rtx regs)
2243 for (i = 0; i < XVECLEN (regs, 0); i++)
2245 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2247 /* A NULL entry means the parameter goes both on the stack and in
2248 registers. This can also be a MEM for targets that pass values
2249 partially on the stack and partially in registers. */
2250 if (reg != 0 && REG_P (reg))
2251 use_reg (call_fusage, reg);
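#if 0
/* Editor's sketch, not part of the original file: how a call expander
   might record the hard registers a call reads.  The register numbers
   are hypothetical.  */
static void
example_record_call_uses (rtx *call_fusage)
{
  /* One SImode value in hard register 0...  */
  use_reg (call_fusage, gen_rtx_REG (SImode, 0));
  /* ...and two consecutive hard registers starting at 4.  */
  use_regs (call_fusage, 4, 2);
}
#endif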
/* Return the defining gimple statement for SSA_NAME NAME if it is an
   assignment and the code of the expression on the RHS is CODE.
   Otherwise return NULL.  */
2260 get_def_for_expr (tree name, enum tree_code code)
if (TREE_CODE (name) != SSA_NAME)
  return NULL;

def_stmt = get_gimple_for_ssa_name (name);
if (!def_stmt
    || gimple_assign_rhs_code (def_stmt) != code)
  return NULL;
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as an argument in every CONSTFUN call.
   ALIGN is the maximum alignment we can assume.  MEMSETP is true if this
   is a memset operation and false if it's a copy of a constant string.
   Return nonzero if a call to store_by_pieces should succeed.  */
2284 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2285 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2286 void *constfundata, unsigned int align, bool memsetp)
2288 unsigned HOST_WIDE_INT l;
2289 unsigned int max_size;
2290 HOST_WIDE_INT offset = 0;
2291 enum machine_mode mode, tmode;
2292 enum insn_code icode;
if (! (memsetp
       ? SET_BY_PIECES_P (len, align)
       : STORE_BY_PIECES_P (len, align)))
  return 0;
2304 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2305 if (align >= GET_MODE_ALIGNMENT (tmode))
2306 align = GET_MODE_ALIGNMENT (tmode);
2309 enum machine_mode xmode;
for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
     tmode != VOIDmode;
     xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
  if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
      || SLOW_UNALIGNED_ACCESS (tmode, align))
    break;

align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2321 /* We would first store what we can in the largest integer mode, then go to
2322 successively smaller modes. */
for (reverse = 0;
     reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
     reverse++)
2330 max_size = STORE_MAX_PIECES + 1;
2331 while (max_size > 1)
2333 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2334 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
if (GET_MODE_SIZE (tmode) < max_size)
  mode = tmode;

if (mode == VOIDmode)
  break;
2341 icode = optab_handler (mov_optab, mode)->insn_code;
2342 if (icode != CODE_FOR_nothing
2343 && align >= GET_MODE_ALIGNMENT (mode))
2345 unsigned int size = GET_MODE_SIZE (mode);
2352 cst = (*constfun) (constfundata, offset, mode);
if (!LEGITIMATE_CONSTANT_P (cst))
  return 0;
2363 max_size = GET_MODE_SIZE (mode);
/* The code above should have handled everything.  */
gcc_assert (!l);

return 1;
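#if 0
/* Editor's sketch, not part of the original file: the usual pairing of
   can_store_by_pieces with store_by_pieces.  The callback mirrors
   clear_by_pieces_1 below and simply describes an all-zero pattern;
   the names are hypothetical.  */
static rtx
example_zero_byte (void *data ATTRIBUTE_UNUSED,
		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}

static void
example_store_zeros (rtx to, unsigned HOST_WIDE_INT len)
{
  if (can_store_by_pieces (len, example_zero_byte, NULL,
			   MEM_ALIGN (to), true))
    store_by_pieces (to, len, example_zero_byte, NULL,
		     MEM_ALIGN (to), true, 0);
}
#endif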
2373 /* Generate several move instructions to store LEN bytes generated by
CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
pointer which will be passed as an argument in every CONSTFUN call.
ALIGN is the maximum alignment we can assume.  MEMSETP is true if this
is a memset operation and false if it's a copy of a constant string.
If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
stpcpy.  */
2383 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2384 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2385 void *constfundata, unsigned int align, bool memsetp, int endp)
2387 struct store_by_pieces_d data;
2391 gcc_assert (endp != 2);
gcc_assert (memsetp
	    ? SET_BY_PIECES_P (len, align)
	    : STORE_BY_PIECES_P (len, align));
2398 data.constfun = constfun;
2399 data.constfundata = constfundata;
2402 store_by_pieces_1 (&data, align);
2407 gcc_assert (!data.reverse);
2412 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2413 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
else
  data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
						  -1));
to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
				 data.offset);
2425 to1 = adjust_address (data.to, QImode, data.offset);
2433 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2434 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2437 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2439 struct store_by_pieces_d data;
2444 data.constfun = clear_by_pieces_1;
2445 data.constfundata = NULL;
2448 store_by_pieces_1 (&data, align);
2451 /* Callback routine for clear_by_pieces.
2452 Return const0_rtx unconditionally. */
2455 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2456 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2457 enum machine_mode mode ATTRIBUTE_UNUSED)
2462 /* Subroutine of clear_by_pieces and store_by_pieces.
2463 Generate several move instructions to store LEN bytes of block TO. (A MEM
2464 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2467 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2468 unsigned int align ATTRIBUTE_UNUSED)
2470 rtx to_addr = XEXP (data->to, 0);
2471 unsigned int max_size = STORE_MAX_PIECES + 1;
2472 enum machine_mode mode = VOIDmode, tmode;
2473 enum insn_code icode;
2476 data->to_addr = to_addr;
data->autinc_to
  = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
     || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2481 data->explicit_inc_to = 0;
data->reverse
  = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
if (data->reverse)
  data->offset = data->len;
2487 /* If storing requires more than two move insns,
2488 copy addresses to registers (to make displacements shorter)
2489 and use post-increment if available. */
2490 if (!data->autinc_to
2491 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2493 /* Determine the main mode we'll be using. */
2494 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2495 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
if (GET_MODE_SIZE (tmode) < max_size)
  mode = tmode;
2499 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2501 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2502 data->autinc_to = 1;
2503 data->explicit_inc_to = -1;
2506 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2507 && ! data->autinc_to)
2509 data->to_addr = copy_addr_to_reg (to_addr);
2510 data->autinc_to = 1;
2511 data->explicit_inc_to = 1;
2514 if ( !data->autinc_to && CONSTANT_P (to_addr))
2515 data->to_addr = copy_addr_to_reg (to_addr);
2518 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2519 if (align >= GET_MODE_ALIGNMENT (tmode))
2520 align = GET_MODE_ALIGNMENT (tmode);
2523 enum machine_mode xmode;
for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
     tmode != VOIDmode;
     xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
  if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
      || SLOW_UNALIGNED_ACCESS (tmode, align))
    break;

align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2535 /* First store what we can in the largest integer mode, then go to
2536 successively smaller modes. */
2538 while (max_size > 1)
2540 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2541 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
if (GET_MODE_SIZE (tmode) < max_size)
  mode = tmode;

if (mode == VOIDmode)
  break;
2548 icode = optab_handler (mov_optab, mode)->insn_code;
2549 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2550 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2552 max_size = GET_MODE_SIZE (mode);
2555 /* The code above should have handled everything. */
2556 gcc_assert (!data->len);
2559 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2560 with move instructions for mode MODE. GENFUN is the gen_... function
2561 to make a move insn for that mode. DATA has all the other info. */
2564 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2565 struct store_by_pieces_d *data)
2567 unsigned int size = GET_MODE_SIZE (mode);
2570 while (data->len >= size)
2573 data->offset -= size;
2575 if (data->autinc_to)
2576 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2579 to1 = adjust_address (data->to, mode, data->offset);
2581 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2582 emit_insn (gen_add2_insn (data->to_addr,
2583 GEN_INT (-(HOST_WIDE_INT) size)));
2585 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2586 emit_insn ((*genfun) (to1, cst));
2588 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2589 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2591 if (! data->reverse)
2592 data->offset += size;
2598 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2599 its length in bytes. */
2602 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2603 unsigned int expected_align, HOST_WIDE_INT expected_size)
2605 enum machine_mode mode = GET_MODE (object);
2608 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2610 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2611 just move a zero. Otherwise, do this a piece at a time. */
if (mode != BLKmode
    && CONST_INT_P (size)
2614 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2616 rtx zero = CONST0_RTX (mode);
2619 emit_move_insn (object, zero);
2623 if (COMPLEX_MODE_P (mode))
2625 zero = CONST0_RTX (GET_MODE_INNER (mode));
2628 write_complex_part (object, zero, 0);
2629 write_complex_part (object, zero, 1);
if (size == const0_rtx)
  return NULL_RTX;
2638 align = MEM_ALIGN (object);
2640 if (CONST_INT_P (size)
2641 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2642 clear_by_pieces (object, INTVAL (size), align);
2643 else if (set_storage_via_setmem (object, size, const0_rtx, align,
expected_align, expected_size))
  ;
2646 else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2647 return set_storage_via_libcall (object, size, const0_rtx,
2648 method == BLOCK_OP_TAILCALL);
2656 clear_storage (rtx object, rtx size, enum block_op_methods method)
2658 return clear_storage_hints (object, size, method, 0, -1);
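#if 0
/* Editor's sketch, not part of the original file: zero a 32-byte
   BLKmode buffer whose address sits in a hypothetical Pmode pseudo
   ADDR.  clear_storage picks move-by-pieces, a setmem pattern or a
   memset libcall as appropriate.  */
static void
example_clear_buffer (rtx addr)
{
  rtx mem = gen_rtx_MEM (BLKmode, addr);
  set_mem_align (mem, BITS_PER_WORD);
  clear_storage (mem, GEN_INT (32), BLOCK_OP_NORMAL);
}
#endif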
2662 /* A subroutine of clear_storage. Expand a call to memset.
2663 Return the return value of memset, 0 otherwise. */
2666 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2668 tree call_expr, fn, object_tree, size_tree, val_tree;
2669 enum machine_mode size_mode;
/* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
   reference those pseudos through tree wrappers (see the make_tree
   calls below) and use them later.  */
2675 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2677 size_mode = TYPE_MODE (sizetype);
2678 size = convert_to_mode (size_mode, size, 1);
2679 size = copy_to_mode_reg (size_mode, size);
2681 /* It is incorrect to use the libcall calling conventions to call
2682 memset in this context. This could be a user call to memset and
2683 the user may wish to examine the return value from memset. For
2684 targets where libcalls and normal calls have different conventions
2685 for returning pointers, we could end up generating incorrect code. */
2687 object_tree = make_tree (ptr_type_node, object);
2688 if (!CONST_INT_P (val))
2689 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2690 size_tree = make_tree (sizetype, size);
2691 val_tree = make_tree (integer_type_node, val);
2693 fn = clear_storage_libcall_fn (true);
call_expr = build_call_expr (fn, 3,
			     object_tree, val_tree, size_tree);
2696 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2698 retval = expand_normal (call_expr);
2703 /* A subroutine of set_storage_via_libcall. Create the tree node
2704 for the function we use for block clears. The first time FOR_CALL
2705 is true, we call assemble_external. */
2707 tree block_clear_fn;
2710 init_block_clear_fn (const char *asmspec)
2712 if (!block_clear_fn)
2716 fn = get_identifier ("memset");
2717 args = build_function_type_list (ptr_type_node, ptr_type_node,
integer_type_node, sizetype,
NULL_TREE);
2721 fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2722 DECL_EXTERNAL (fn) = 1;
2723 TREE_PUBLIC (fn) = 1;
2724 DECL_ARTIFICIAL (fn) = 1;
2725 TREE_NOTHROW (fn) = 1;
2726 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2727 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2729 block_clear_fn = fn;
if (asmspec)
  set_user_assembler_name (block_clear_fn, asmspec);
2737 clear_storage_libcall_fn (int for_call)
2739 static bool emitted_extern;
2741 if (!block_clear_fn)
2742 init_block_clear_fn (NULL);
2744 if (for_call && !emitted_extern)
2746 emitted_extern = true;
2747 make_decl_rtl (block_clear_fn);
2748 assemble_external (block_clear_fn);
2751 return block_clear_fn;
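/* For example, a target whose C library provides the clearing routine
   under a different assembler name could call (the name below is
   hypothetical)

     init_block_clear_fn ("__gcc_memset");

   once during initialization; subsequent calls to
   clear_storage_libcall_fn then resolve to that symbol.  */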
2754 /* Expand a setmem pattern; return true if successful. */
2757 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2758 unsigned int expected_align, HOST_WIDE_INT expected_size)
2760 /* Try the most limited insn first, because there's no point
2761 including more than one in the machine description unless
2762 the more limited one has some advantage. */
2764 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2765 enum machine_mode mode;
2767 if (expected_align < align)
2768 expected_align = align;
2770 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2771 mode = GET_MODE_WIDER_MODE (mode))
2773 enum insn_code code = setmem_optab[(int) mode];
2774 insn_operand_predicate_fn pred;
2776 if (code != CODE_FOR_nothing
2777 /* We don't need MODE to be narrower than
2778 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2779 the mode mask, as it is returned by the macro, it will
2780 definitely be less than the actual mode mask. */
2781 && ((CONST_INT_P (size)
2782 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2783 <= (GET_MODE_MASK (mode) >> 1)))
2784 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2785 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2786 || (*pred) (object, BLKmode))
2787 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2788 || (*pred) (opalign, VOIDmode)))
2791 enum machine_mode char_mode;
2792 rtx last = get_last_insn ();
2795 opsize = convert_to_mode (mode, size, 1);
2796 pred = insn_data[(int) code].operand[1].predicate;
2797 if (pred != 0 && ! (*pred) (opsize, mode))
2798 opsize = copy_to_mode_reg (mode, opsize);
2801 char_mode = insn_data[(int) code].operand[2].mode;
2802 if (char_mode != VOIDmode)
2804 opchar = convert_to_mode (char_mode, opchar, 1);
2805 pred = insn_data[(int) code].operand[2].predicate;
2806 if (pred != 0 && ! (*pred) (opchar, char_mode))
2807 opchar = copy_to_mode_reg (char_mode, opchar);
2810 if (insn_data[(int) code].n_operands == 4)
2811 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
else
  pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
			      GEN_INT (expected_align
				       / BITS_PER_UNIT),
			      GEN_INT (expected_size));
2823 delete_insns_since (last);
2831 /* Write to one of the components of the complex value CPLX. Write VAL to
the real part if IMAG_P is false, and the imaginary part if it's true.  */
2835 write_complex_part (rtx cplx, rtx val, bool imag_p)
2837 enum machine_mode cmode;
2838 enum machine_mode imode;
2841 if (GET_CODE (cplx) == CONCAT)
emit_move_insn (XEXP (cplx, imag_p), val);
return;
2847 cmode = GET_MODE (cplx);
2848 imode = GET_MODE_INNER (cmode);
2849 ibitsize = GET_MODE_BITSIZE (imode);
2851 /* For MEMs simplify_gen_subreg may generate an invalid new address
2852 because, e.g., the original address is considered mode-dependent
2853 by the target, which restricts simplify_subreg from invoking
2854 adjust_address_nv. Instead of preparing fallback support for an
2855 invalid address, we call adjust_address_nv directly. */
if (MEM_P (cplx))
  {
    emit_move_insn (adjust_address_nv (cplx, imode,
				       imag_p ? GET_MODE_SIZE (imode) : 0),
		    val);
    return;
  }
2864 /* If the sub-object is at least word sized, then we know that subregging
2865 will work. This special case is important, since store_bit_field
2866 wants to operate on integer modes, and there's rarely an OImode to
2867 correspond to TCmode. */
2868 if (ibitsize >= BITS_PER_WORD
2869 /* For hard regs we have exact predicates. Assume we can split
2870 the original object if it spans an even number of hard regs.
2871 This special case is important for SCmode on 64-bit platforms
2872 where the natural size of floating-point regs is 32-bit. */
|| (REG_P (cplx)
    && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2875 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2877 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2878 imag_p ? GET_MODE_SIZE (imode) : 0);
if (part)
  {
    emit_move_insn (part, val);
    return;
  }
2885 /* simplify_gen_subreg may fail for sub-word MEMs. */
2886 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2889 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2892 /* Extract one of the components of the complex value CPLX. Extract the
2893 real part if IMAG_P is false, and the imaginary part if it's true. */
2896 read_complex_part (rtx cplx, bool imag_p)
2898 enum machine_mode cmode, imode;
2901 if (GET_CODE (cplx) == CONCAT)
2902 return XEXP (cplx, imag_p);
2904 cmode = GET_MODE (cplx);
2905 imode = GET_MODE_INNER (cmode);
2906 ibitsize = GET_MODE_BITSIZE (imode);
2908 /* Special case reads from complex constants that got spilled to memory. */
2909 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2911 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2912 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2914 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2915 if (CONSTANT_CLASS_P (part))
2916 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2920 /* For MEMs simplify_gen_subreg may generate an invalid new address
2921 because, e.g., the original address is considered mode-dependent
2922 by the target, which restricts simplify_subreg from invoking
2923 adjust_address_nv. Instead of preparing fallback support for an
2924 invalid address, we call adjust_address_nv directly. */
if (MEM_P (cplx))
  return adjust_address_nv (cplx, imode,
2927 imag_p ? GET_MODE_SIZE (imode) : 0);
2929 /* If the sub-object is at least word sized, then we know that subregging
2930 will work. This special case is important, since extract_bit_field
2931 wants to operate on integer modes, and there's rarely an OImode to
2932 correspond to TCmode. */
2933 if (ibitsize >= BITS_PER_WORD
2934 /* For hard regs we have exact predicates. Assume we can split
2935 the original object if it spans an even number of hard regs.
2936 This special case is important for SCmode on 64-bit platforms
2937 where the natural size of floating-point regs is 32-bit. */
|| (REG_P (cplx)
    && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2940 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2942 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
imag_p ? GET_MODE_SIZE (imode) : 0);
if (ret)
  return ret;
2947 /* simplify_gen_subreg may fail for sub-word MEMs. */
2948 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2951 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2952 true, NULL_RTX, imode, imode);
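#if 0
/* Editor's sketch, not part of the original file: form the conjugate
   of an SCmode value in place by negating its imaginary part; CPLX is
   assumed to be an SCmode REG, CONCAT or MEM.  */
static void
example_conjugate (rtx cplx)
{
  rtx im = read_complex_part (cplx, true);
  im = expand_unop (SFmode, neg_optab, im, NULL_RTX, 0);
  write_complex_part (cplx, im, true);
}
#endif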
2955 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2956 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2957 represented in NEW_MODE. If FORCE is true, this will never happen, as
2958 we'll force-create a SUBREG if needed. */
2961 emit_move_change_mode (enum machine_mode new_mode,
2962 enum machine_mode old_mode, rtx x, bool force)
2966 if (push_operand (x, GET_MODE (x)))
2968 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2969 MEM_COPY_ATTRIBUTES (ret, x);
2973 /* We don't have to worry about changing the address since the
2974 size in bytes is supposed to be the same. */
2975 if (reload_in_progress)
2977 /* Copy the MEM to change the mode and move any
2978 substitutions from the old MEM to the new one. */
2979 ret = adjust_address_nv (x, new_mode, 0);
2980 copy_replacements (x, ret);
2983 ret = adjust_address (x, new_mode, 0);
2987 /* Note that we do want simplify_subreg's behavior of validating
2988 that the new mode is ok for a hard register. If we were to use
2989 simplify_gen_subreg, we would create the subreg, but would
2990 probably run into the target not being able to implement it. */
2991 /* Except, of course, when FORCE is true, when this is exactly what
2992 we want. Which is needed for CCmodes on some targets. */
if (force)
  ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
else
  ret = simplify_subreg (new_mode, x, old_mode, 0);
3002 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
3003 an integer mode of the same size as MODE. Returns the instruction
3004 emitted, or NULL if such a move could not be generated. */
3007 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3009 enum machine_mode imode;
3010 enum insn_code code;
3012 /* There must exist a mode of the exact size we require. */
3013 imode = int_mode_for_mode (mode);
3014 if (imode == BLKmode)
3017 /* The target must support moves in this mode. */
3018 code = optab_handler (mov_optab, imode)->insn_code;
3019 if (code == CODE_FOR_nothing)
3022 x = emit_move_change_mode (imode, mode, x, force);
3025 y = emit_move_change_mode (imode, mode, y, force);
3028 return emit_insn (GEN_FCN (code) (x, y));
3031 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3032 Return an equivalent MEM that does not use an auto-increment. */
3035 emit_move_resolve_push (enum machine_mode mode, rtx x)
3037 enum rtx_code code = GET_CODE (XEXP (x, 0));
3038 HOST_WIDE_INT adjust;
3041 adjust = GET_MODE_SIZE (mode);
3042 #ifdef PUSH_ROUNDING
3043 adjust = PUSH_ROUNDING (adjust);
if (code == PRE_DEC || code == POST_DEC)
  adjust = -adjust;
3047 else if (code == PRE_MODIFY || code == POST_MODIFY)
3049 rtx expr = XEXP (XEXP (x, 0), 1);
3052 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3053 gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3054 val = INTVAL (XEXP (expr, 1));
if (GET_CODE (expr) == MINUS)
  val = -val;
3057 gcc_assert (adjust == val || adjust == -val);
3061 /* Do not use anti_adjust_stack, since we don't want to update
3062 stack_pointer_delta. */
3063 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3064 GEN_INT (adjust), stack_pointer_rtx,
3065 0, OPTAB_LIB_WIDEN);
3066 if (temp != stack_pointer_rtx)
3067 emit_move_insn (stack_pointer_rtx, temp);
3074 temp = stack_pointer_rtx;
3079 temp = plus_constant (stack_pointer_rtx, -adjust);
3085 return replace_equiv_address (x, temp);
3088 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3089 X is known to satisfy push_operand, and MODE is known to be complex.
3090 Returns the last instruction emitted. */
3093 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3095 enum machine_mode submode = GET_MODE_INNER (mode);
3098 #ifdef PUSH_ROUNDING
3099 unsigned int submodesize = GET_MODE_SIZE (submode);
3101 /* In case we output to the stack, but the size is smaller than the
3102 machine can push exactly, we need to use move instructions. */
3103 if (PUSH_ROUNDING (submodesize) != submodesize)
3105 x = emit_move_resolve_push (mode, x);
3106 return emit_move_insn (x, y);
3110 /* Note that the real part always precedes the imag part in memory
regardless of the machine's endianness.  */
3112 switch (GET_CODE (XEXP (x, 0)))
3126 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3127 read_complex_part (y, imag_first));
3128 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3129 read_complex_part (y, !imag_first));
3132 /* A subroutine of emit_move_complex. Perform the move from Y to X
3133 via two moves of the parts. Returns the last instruction emitted. */
3136 emit_move_complex_parts (rtx x, rtx y)
3138 /* Show the output dies here. This is necessary for SUBREGs
3139 of pseudos since we cannot track their lifetimes correctly;
3140 hard regs shouldn't appear here except as return values. */
3141 if (!reload_completed && !reload_in_progress
&& REG_P (x) && !reg_overlap_mentioned_p (x, y))
  emit_clobber (x);
3145 write_complex_part (x, read_complex_part (y, false), false);
3146 write_complex_part (x, read_complex_part (y, true), true);
3148 return get_last_insn ();
3151 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3152 MODE is known to be complex. Returns the last instruction emitted. */
3155 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3159 /* Need to take special care for pushes, to maintain proper ordering
3160 of the data, and possibly extra padding. */
3161 if (push_operand (x, mode))
3162 return emit_move_complex_push (mode, x, y);
3164 /* See if we can coerce the target into moving both values at once. */
3166 /* Move floating point as parts. */
if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
    && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code
       != CODE_FOR_nothing)
  try_int = false;
/* Not possible if the values are inherently not adjacent.  */
else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
  try_int = false;
/* Is possible if both are registers (or subregs of registers).  */
else if (register_operand (x, mode) && register_operand (y, mode))
  try_int = true;
/* If one of the operands is a memory, and alignment constraints
   are friendly enough, we may be able to do combined memory operations.
   We do not attempt this if Y is a constant because that combination is
   usually better with the by-parts thing below.  */
else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
	 && (!STRICT_ALIGNMENT
	     || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
  try_int = true;
else
  try_int = false;
3191 /* For memory to memory moves, optimal behavior can be had with the
3192 existing block move logic. */
3193 if (MEM_P (x) && MEM_P (y))
3195 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3196 BLOCK_OP_NO_LIBCALL);
3197 return get_last_insn ();
ret = emit_move_via_integer (mode, x, y, true);
if (ret)
  return ret;
3205 return emit_move_complex_parts (x, y);
3208 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3209 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3212 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3216 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3219 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3220 if (code != CODE_FOR_nothing)
3222 x = emit_move_change_mode (CCmode, mode, x, true);
3223 y = emit_move_change_mode (CCmode, mode, y, true);
3224 return emit_insn (GEN_FCN (code) (x, y));
3228 /* Otherwise, find the MODE_INT mode of the same width. */
3229 ret = emit_move_via_integer (mode, x, y, false);
3230 gcc_assert (ret != NULL);
3234 /* Return true if word I of OP lies entirely in the
3235 undefined bits of a paradoxical subreg. */
3238 undefined_operand_subword_p (const_rtx op, int i)
3240 enum machine_mode innermode, innermostmode;
if (GET_CODE (op) != SUBREG)
  return false;
3244 innermode = GET_MODE (op);
3245 innermostmode = GET_MODE (SUBREG_REG (op));
3246 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
/* The SUBREG_BYTE represents the offset, as if the value were stored in
   memory, except for a paradoxical subreg where we define
   SUBREG_BYTE to be 0; undo this exception as in simplify_subreg.  */
3251 if (SUBREG_BYTE (op) == 0
3252 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3254 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3255 if (WORDS_BIG_ENDIAN)
3256 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3257 if (BYTES_BIG_ENDIAN)
3258 offset += difference % UNITS_PER_WORD;
3260 if (offset >= GET_MODE_SIZE (innermostmode)
|| offset <= -GET_MODE_SIZE (word_mode))
  return true;

return false;
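/* For instance, on a 32-bit target, word 1 of (subreg:DI (reg:SI N) 0)
   lies entirely in the upper, undefined half of the paradoxical
   subreg, so this predicate returns true for I == 1 and
   emit_move_multi_word below can skip that word's move.  */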
3266 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3267 MODE is any multi-word or full-word mode that lacks a move_insn
3268 pattern. Note that you will get better code if you define such
3269 patterns, even if they must turn into multiple assembler instructions. */
3272 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3279 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3281 /* If X is a push on the stack, do the push now and replace
3282 X with a reference to the stack pointer. */
3283 if (push_operand (x, mode))
3284 x = emit_move_resolve_push (mode, x);
3286 /* If we are in reload, see if either operand is a MEM whose address
3287 is scheduled for replacement. */
3288 if (reload_in_progress && MEM_P (x)
3289 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3290 x = replace_equiv_address_nv (x, inner);
3291 if (reload_in_progress && MEM_P (y)
3292 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3293 y = replace_equiv_address_nv (y, inner);
3297 need_clobber = false;
for (i = 0;
     i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
     i++)
3302 rtx xpart = operand_subword (x, i, 1, mode);
3305 /* Do not generate code for a move if it would come entirely
3306 from the undefined bits of a paradoxical subreg. */
if (undefined_operand_subword_p (y, i))
  continue;
3310 ypart = operand_subword (y, i, 1, mode);
3312 /* If we can't get a part of Y, put Y into memory if it is a
3313 constant. Otherwise, force it into a register. Then we must
3314 be able to get a part of Y. */
3315 if (ypart == 0 && CONSTANT_P (y))
3317 y = use_anchored_address (force_const_mem (mode, y));
3318 ypart = operand_subword (y, i, 1, mode);
3320 else if (ypart == 0)
3321 ypart = operand_subword_force (y, i, mode);
3323 gcc_assert (xpart && ypart);
3325 need_clobber |= (GET_CODE (xpart) == SUBREG);
3327 last_insn = emit_move_insn (xpart, ypart);
3333 /* Show the output dies here. This is necessary for SUBREGs
3334 of pseudos since we cannot track their lifetimes correctly;
3335 hard regs shouldn't appear here except as return values.
3336 We never want to emit such a clobber after reload. */
if (x != y
    && ! (reload_in_progress || reload_completed)
3339 && need_clobber != 0)
3347 /* Low level part of emit_move_insn.
3348 Called just like emit_move_insn, but assumes X and Y
3349 are basically valid. */
3352 emit_move_insn_1 (rtx x, rtx y)
3354 enum machine_mode mode = GET_MODE (x);
3355 enum insn_code code;
3357 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3359 code = optab_handler (mov_optab, mode)->insn_code;
3360 if (code != CODE_FOR_nothing)
3361 return emit_insn (GEN_FCN (code) (x, y));
3363 /* Expand complex moves by moving real part and imag part. */
3364 if (COMPLEX_MODE_P (mode))
3365 return emit_move_complex (mode, x, y);
3367 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3368 || ALL_FIXED_POINT_MODE_P (mode))
3370 rtx result = emit_move_via_integer (mode, x, y, true);
/* If we can't find an integer mode, fall back to multiple words.  */
3376 return emit_move_multi_word (mode, x, y);
3379 if (GET_MODE_CLASS (mode) == MODE_CC)
3380 return emit_move_ccmode (mode, x, y);
3382 /* Try using a move pattern for the corresponding integer mode. This is
3383 only safe when simplify_subreg can convert MODE constants into integer
3384 constants. At present, it can only do this reliably if the value
3385 fits within a HOST_WIDE_INT. */
3386 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3388 rtx ret = emit_move_via_integer (mode, x, y, false);
3393 return emit_move_multi_word (mode, x, y);
3396 /* Generate code to copy Y into X.
3397 Both Y and X must have the same mode, except that
3398 Y can be a constant with VOIDmode.
3399 This mode cannot be BLKmode; use emit_block_move for that.
3401 Return the last instruction emitted. */
3404 emit_move_insn (rtx x, rtx y)
3406 enum machine_mode mode = GET_MODE (x);
3407 rtx y_cst = NULL_RTX;
3410 gcc_assert (mode != BLKmode
3411 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
if (optimize
    && CONSTANT_P (y)
    && SCALAR_FLOAT_MODE_P (GET_MODE (x))
    && (last_insn = compress_float_constant (x, y)))
  return last_insn;
3422 if (!LEGITIMATE_CONSTANT_P (y))
3424 y = force_const_mem (mode, y);
3426 /* If the target's cannot_force_const_mem prevented the spill,
3427 assume that the target's move expanders will also take care
3428 of the non-legitimate constant. */
3432 y = use_anchored_address (y);
3436 /* If X or Y are memory references, verify that their addresses are valid
if (MEM_P (x)
    && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
				       MEM_ADDR_SPACE (x))
	&& ! push_operand (x, GET_MODE (x))))
3442 x = validize_mem (x);
if (MEM_P (y)
    && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3446 MEM_ADDR_SPACE (y)))
3447 y = validize_mem (y);
3449 gcc_assert (mode != BLKmode);
3451 last_insn = emit_move_insn_1 (x, y);
3453 if (y_cst && REG_P (x)
3454 && (set = single_set (last_insn)) != NULL_RTX
3455 && SET_DEST (set) == x
3456 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3457 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
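#if 0
/* Editor's sketch, not part of the original file: the canonical use of
   emit_move_insn, copying a constant into a fresh pseudo.  The
   function validates both operands and selects a suitable pattern.  */
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}
#endif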
3462 /* If Y is representable exactly in a narrower mode, and the target can
3463 perform the extension directly from constant or memory, then emit the
3464 move as an extension. */
3467 compress_float_constant (rtx x, rtx y)
3469 enum machine_mode dstmode = GET_MODE (x);
3470 enum machine_mode orig_srcmode = GET_MODE (y);
3471 enum machine_mode srcmode;
3473 int oldcost, newcost;
3474 bool speed = optimize_insn_for_speed_p ();
3476 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3478 if (LEGITIMATE_CONSTANT_P (y))
3479 oldcost = rtx_cost (y, SET, speed);
3481 oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3483 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3484 srcmode != orig_srcmode;
3485 srcmode = GET_MODE_WIDER_MODE (srcmode))
3488 rtx trunc_y, last_insn;
3490 /* Skip if the target can't extend this way. */
3491 ic = can_extend_p (dstmode, srcmode, 0);
if (ic == CODE_FOR_nothing)
  continue;
3495 /* Skip if the narrowed value isn't exact. */
if (! exact_real_truncate (srcmode, &r))
  continue;
3499 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3501 if (LEGITIMATE_CONSTANT_P (trunc_y))
/* Skip if the target needs extra instructions to perform
   the extension.  */
if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
  continue;
3507 /* This is valid, but may not be cheaper than the original. */
3508 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
if (oldcost < newcost)
  continue;
3512 else if (float_extend_from_mem[dstmode][srcmode])
3514 trunc_y = force_const_mem (srcmode, trunc_y);
3515 /* This is valid, but may not be cheaper than the original. */
3516 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
if (oldcost < newcost)
  continue;
3519 trunc_y = validize_mem (trunc_y);
3524 /* For CSE's benefit, force the compressed constant pool entry
3525 into a new pseudo. This constant may be used in different modes,
3526 and if not, combine will put things back together for us. */
3527 trunc_y = force_reg (srcmode, trunc_y);
3528 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3529 last_insn = get_last_insn ();
3532 set_unique_reg_note (last_insn, REG_EQUAL, y);
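/* For example, if Y is the DFmode constant 1.0, it truncates exactly
   to SFmode, so on a target that can extend directly from memory the
   routine may emit something like

     (set (reg:DF d) (float_extend:DF (mem/u:SF (symbol_ref ...))))

   in place of a DFmode constant-pool load, provided the extension is
   no more costly than the original move.  */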
3540 /* Pushing data onto the stack. */
3542 /* Push a block of length SIZE (perhaps variable)
3543 and return an rtx to address the beginning of the block.
3544 The value may be virtual_outgoing_args_rtx.
3546 EXTRA is the number of bytes of padding to push in addition to SIZE.
3547 BELOW nonzero means this padding comes at low addresses;
3548 otherwise, the padding comes at high addresses. */
3551 push_block (rtx size, int extra, int below)
3555 size = convert_modes (Pmode, ptr_mode, size, 1);
3556 if (CONSTANT_P (size))
3557 anti_adjust_stack (plus_constant (size, extra));
3558 else if (REG_P (size) && extra == 0)
3559 anti_adjust_stack (size);
3562 temp = copy_to_mode_reg (Pmode, size);
3564 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3565 temp, 0, OPTAB_LIB_WIDEN);
3566 anti_adjust_stack (temp);
3569 #ifndef STACK_GROWS_DOWNWARD
3575 temp = virtual_outgoing_args_rtx;
3576 if (extra != 0 && below)
3577 temp = plus_constant (temp, extra);
3581 if (CONST_INT_P (size))
3582 temp = plus_constant (virtual_outgoing_args_rtx,
3583 -INTVAL (size) - (below ? 0 : extra));
3584 else if (extra != 0 && !below)
3585 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3586 negate_rtx (Pmode, plus_constant (size, extra)));
3588 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3589 negate_rtx (Pmode, size));
3592 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3595 #ifdef PUSH_ROUNDING
3597 /* Emit single push insn. */
3600 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3603 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3605 enum insn_code icode;
3606 insn_operand_predicate_fn pred;
3608 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* If there is a push pattern, use it.  Otherwise try the old way of
   throwing a MEM representing the push operation at the move expander.  */
3611 icode = optab_handler (push_optab, mode)->insn_code;
3612 if (icode != CODE_FOR_nothing)
3614 if (((pred = insn_data[(int) icode].operand[0].predicate)
3615 && !((*pred) (x, mode))))
3616 x = force_reg (mode, x);
emit_insn (GEN_FCN (icode) (x));
return;
3620 if (GET_MODE_SIZE (mode) == rounded_size)
3621 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3622 /* If we are to pad downward, adjust the stack pointer first and
3623 then store X into the stack location using an offset. This is
because emit_move_insn does not know how to pad; it does not have
access to type.  */
3626 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3628 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3629 HOST_WIDE_INT offset;
3631 emit_move_insn (stack_pointer_rtx,
3632 expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
				    sub_optab,
#else
				    add_optab,
#endif
				    stack_pointer_rtx,
				    GEN_INT (rounded_size),
3640 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3642 offset = (HOST_WIDE_INT) padding_size;
3643 #ifdef STACK_GROWS_DOWNWARD
3644 if (STACK_PUSH_CODE == POST_DEC)
/* We have already decremented the stack pointer, so get the
   previous value.  */
3647 offset += (HOST_WIDE_INT) rounded_size;
3649 if (STACK_PUSH_CODE == POST_INC)
/* We have already incremented the stack pointer, so get the
   previous value.  */
3652 offset -= (HOST_WIDE_INT) rounded_size;
3654 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3658 #ifdef STACK_GROWS_DOWNWARD
3659 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3660 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3661 GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
  /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
  dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
			    GEN_INT (rounded_size));
#endif
3667 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3670 dest = gen_rtx_MEM (mode, dest_addr);
3674 set_mem_attributes (dest, type, 1);
3676 if (flag_optimize_sibling_calls)
3677 /* Function incoming arguments may overlap with sibling call
3678 outgoing arguments and we cannot allow reordering of reads
3679 from function arguments with stores to outgoing arguments
3680 of sibling calls. */
3681 set_mem_alias_set (dest, 0);
3683 emit_move_insn (dest, x);
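/* On a STACK_GROWS_DOWNWARD target whose STACK_PUSH_CODE is PRE_DEC,
   the move emitted above for a word-sized X without padding is simply

     (set (mem:SI (pre_dec:SI (reg sp))) x)

   while the padding cases store through an explicit offset from the
   already-adjusted stack pointer.  */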
3687 /* Generate code to push X onto the stack, assuming it has mode MODE and
MODE is redundant except when X is a CONST_INT (since they don't
carry mode info).
3691 SIZE is an rtx for the size of data to be copied (in bytes),
3692 needed only if X is BLKmode.
3694 ALIGN (in bits) is maximum alignment we can assume.
3696 If PARTIAL and REG are both nonzero, then copy that many of the first
3697 bytes of X into registers starting with REG, and push the rest of X.
3698 The amount of space pushed is decreased by PARTIAL bytes.
3699 REG must be a hard register in this case.
If REG is zero but PARTIAL is not, take all other actions for an
argument partially in registers, but do not actually load any
registers.
3704 EXTRA is the amount in bytes of extra space to leave next to this arg.
3705 This is ignored if an argument block has already been allocated.
3707 On a machine that lacks real push insns, ARGS_ADDR is the address of
3708 the bottom of the argument block for this call. We use indexing off there
to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3710 argument block has not been preallocated.
3712 ARGS_SO_FAR is the size of args previously pushed for this call.
3714 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3715 for arguments passed in registers. If nonzero, it will be the number
3716 of bytes required. */
3719 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3720 unsigned int align, int partial, rtx reg, int extra,
3721 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3725 enum direction stack_direction
3726 #ifdef STACK_GROWS_DOWNWARD
3732 /* Decide where to pad the argument: `downward' for below,
3733 `upward' for above, or `none' for don't pad it.
3734 Default is below for small data on big-endian machines; else above. */
3735 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
/* Invert direction if stack is post-decrement.
   FIXME: why?  */
3739 if (STACK_PUSH_CODE == POST_DEC)
3740 if (where_pad != none)
3741 where_pad = (where_pad == downward ? upward : downward);
if (mode == BLKmode
    || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3748 /* Copy a block into the stack, entirely or partially. */
3755 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3756 used = partial - offset;
3758 if (mode != BLKmode)
3760 /* A value is to be stored in an insufficiently aligned
stack slot; copy via a suitably aligned slot if
necessary.  */
3763 size = GEN_INT (GET_MODE_SIZE (mode));
3764 if (!MEM_P (xinner))
3766 temp = assign_temp (type, 0, 1, 1);
emit_move_insn (temp, xinner);
xinner = temp;
3774 /* USED is now the # of bytes we need not copy to the stack
3775 because registers will take care of them. */
3778 xinner = adjust_address (xinner, BLKmode, used);
3780 /* If the partial register-part of the arg counts in its stack size,
3781 skip the part of stack space corresponding to the registers.
3782 Otherwise, start copying to the beginning of the stack space,
3783 by setting SKIP to 0. */
3784 skip = (reg_parm_stack_space == 0) ? 0 : used;
3786 #ifdef PUSH_ROUNDING
3787 /* Do it with several push insns if that doesn't take lots of insns
3788 and if there is no difficulty with push insns that skip bytes
3789 on the stack for alignment purposes. */
if (args_addr == 0
    && PUSH_ARGS
    && CONST_INT_P (size)
    && skip == 0
3794 && MEM_ALIGN (xinner) >= align
3795 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3796 /* Here we avoid the case of a structure whose weak alignment
3797 forces many pushes of a small amount of data,
3798 and such small pushes do rounding that causes trouble. */
3799 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3800 || align >= BIGGEST_ALIGNMENT
3801 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3802 == (align / BITS_PER_UNIT)))
3803 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3805 /* Push padding now if padding above and stack grows down,
3806 or if padding below and stack grows up.
3807 But if space already allocated, this has already been done. */
3808 if (extra && args_addr == 0
3809 && where_pad != none && where_pad != stack_direction)
3810 anti_adjust_stack (GEN_INT (extra));
3812 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3815 #endif /* PUSH_ROUNDING */
3819 /* Otherwise make space on the stack and copy the data
3820 to the address of that space. */
3822 /* Deduct words put into registers from the size we must copy. */
3825 if (CONST_INT_P (size))
3826 size = GEN_INT (INTVAL (size) - used);
else
  size = expand_binop (GET_MODE (size), sub_optab, size,
		       GEN_INT (used), NULL_RTX, 0,
		       OPTAB_LIB_WIDEN);
3833 /* Get the address of the stack space.
3834 In this case, we do not deal with EXTRA separately.
3835 A single stack adjust will do. */
3838 temp = push_block (size, extra, where_pad == downward);
3841 else if (CONST_INT_P (args_so_far))
3842 temp = memory_address (BLKmode,
3843 plus_constant (args_addr,
3844 skip + INTVAL (args_so_far)));
else
  temp = memory_address (BLKmode,
			 plus_constant (gen_rtx_PLUS (Pmode,
						      args_addr,
						      args_so_far),
					skip));
3852 if (!ACCUMULATE_OUTGOING_ARGS)
3854 /* If the source is referenced relative to the stack pointer,
3855 copy it to another register to stabilize it. We do not need
3856 to do this if we know that we won't be changing sp. */
3858 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3859 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3860 temp = copy_to_reg (temp);
3863 target = gen_rtx_MEM (BLKmode, temp);
3865 /* We do *not* set_mem_attributes here, because incoming arguments
3866 may overlap with sibling call outgoing arguments and we cannot
3867 allow reordering of reads from function arguments with stores
3868 to outgoing arguments of sibling calls. We do, however, want
3869 to record the alignment of the stack slot. */
3870 /* ALIGN may well be better aligned than TYPE, e.g. due to
3871 PARM_BOUNDARY. Assume the caller isn't lying. */
3872 set_mem_align (target, align);
3874 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3877 else if (partial > 0)
3879 /* Scalar partly in registers. */
3881 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3884 /* # bytes of start of argument
3885 that we must make space for but need not store. */
3886 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3887 int args_offset = INTVAL (args_so_far);
3890 /* Push padding now if padding above and stack grows down,
3891 or if padding below and stack grows up.
3892 But if space already allocated, this has already been done. */
3893 if (extra && args_addr == 0
3894 && where_pad != none && where_pad != stack_direction)
3895 anti_adjust_stack (GEN_INT (extra));
3897 /* If we make space by pushing it, we might as well push
3898 the real data. Otherwise, we can leave OFFSET nonzero
3899 and leave the space uninitialized. */
3903 /* Now NOT_STACK gets the number of words that we don't need to
3904 allocate on the stack. Convert OFFSET to words too. */
3905 not_stack = (partial - offset) / UNITS_PER_WORD;
3906 offset /= UNITS_PER_WORD;
3908 /* If the partial register-part of the arg counts in its stack size,
3909 skip the part of stack space corresponding to the registers.
3910 Otherwise, start copying to the beginning of the stack space,
3911 by setting SKIP to 0. */
3912 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3914 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3915 x = validize_mem (force_const_mem (mode, x));
3917 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3918 SUBREGs of such registers are not allowed. */
3919 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3920 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3921 x = copy_to_reg (x);
3923 /* Loop over all the words allocated on the stack for this arg. */
/* We can do it by words, because any scalar bigger than a word
   has a size that is a multiple of a word.  */
3926 #ifndef PUSH_ARGS_REVERSED
for (i = not_stack; i < size; i++)
#else
for (i = size - 1; i >= not_stack; i--)
#endif
3931 if (i >= not_stack + offset)
3932 emit_push_insn (operand_subword_force (x, i, mode),
word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
0, args_addr,
GEN_INT (args_offset + ((i - not_stack + skip)
			* UNITS_PER_WORD)),
reg_parm_stack_space, alignment_pad);
3944 /* Push padding now if padding above and stack grows down,
3945 or if padding below and stack grows up.
3946 But if space already allocated, this has already been done. */
3947 if (extra && args_addr == 0
3948 && where_pad != none && where_pad != stack_direction)
3949 anti_adjust_stack (GEN_INT (extra));
3951 #ifdef PUSH_ROUNDING
3952 if (args_addr == 0 && PUSH_ARGS)
3953 emit_single_push_insn (mode, x, type);
3957 if (CONST_INT_P (args_so_far))
  addr = memory_address (mode,
			 plus_constant (args_addr,
					INTVAL (args_so_far)));
else
  addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
					      args_so_far));

dest = gen_rtx_MEM (mode, addr);
3967 /* We do *not* set_mem_attributes here, because incoming arguments
3968 may overlap with sibling call outgoing arguments and we cannot
3969 allow reordering of reads from function arguments with stores
3970 to outgoing arguments of sibling calls. We do, however, want
3971 to record the alignment of the stack slot. */
3972 /* ALIGN may well be better aligned than TYPE, e.g. due to
3973 PARM_BOUNDARY. Assume the caller isn't lying. */
3974 set_mem_align (dest, align);
3976 emit_move_insn (dest, x);
3980 /* If part should go in registers, copy that part
3981 into the appropriate registers. Do this now, at the end,
3982 since mem-to-mem copies above may do function calls. */
3983 if (partial > 0 && reg != 0)
3985 /* Handle calls that pass values in multiple non-contiguous locations.
3986 The Irix 6 ABI has examples of this. */
3987 if (GET_CODE (reg) == PARALLEL)
3988 emit_group_load (reg, x, type, -1);
3991 gcc_assert (partial % UNITS_PER_WORD == 0);
3992 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3996 if (extra && args_addr == 0 && where_pad == stack_direction)
3997 anti_adjust_stack (GEN_INT (extra));
3999 if (alignment_pad && args_addr == 0)
4000 anti_adjust_stack (alignment_pad);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */
4007 get_subtarget (rtx x)
return (optimize
	|| x == 0
	/* Only registers can be subtargets.  */
	|| !REG_P (x)
	/* Don't use hard regs to avoid extending their life.  */
	|| REGNO (x) < FIRST_PSEUDO_REGISTER
	? 0 : x);
4018 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4019 FIELD is a bitfield. Returns true if the optimization was successful,
4020 and there's nothing else to do. */
4023 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4024 unsigned HOST_WIDE_INT bitpos,
4025 enum machine_mode mode1, rtx str_rtx,
4028 enum machine_mode str_mode = GET_MODE (str_rtx);
4029 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4034 if (mode1 != VOIDmode
4035 || bitsize >= BITS_PER_WORD
4036 || str_bitsize > BITS_PER_WORD
4037 || TREE_SIDE_EFFECTS (to)
|| TREE_THIS_VOLATILE (to))
  return false;
4042 if (!BINARY_CLASS_P (src)
|| TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
  return false;
4046 op0 = TREE_OPERAND (src, 0);
4047 op1 = TREE_OPERAND (src, 1);
if (!operand_equal_p (to, op0, 0))
  return false;
4053 if (MEM_P (str_rtx))
4055 unsigned HOST_WIDE_INT offset1;
4057 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4058 str_mode = word_mode;
4059 str_mode = get_best_mode (bitsize, bitpos,
4060 MEM_ALIGN (str_rtx), str_mode, 0);
if (str_mode == VOIDmode)
  return false;
4063 str_bitsize = GET_MODE_BITSIZE (str_mode);
offset1 = bitpos;
bitpos %= str_bitsize;
4067 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4068 str_rtx = adjust_address (str_rtx, str_mode, offset1);
else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
  return false;

/* If the bit field covers the whole REG/MEM, store_field
   will likely generate better code.  */
if (bitsize >= str_bitsize)
  return false;

/* We can't handle fields split across multiple entities.  */
if (bitpos + bitsize > str_bitsize)
  return false;
4082 if (BYTES_BIG_ENDIAN)
4083 bitpos = str_bitsize - bitpos - bitsize;
switch (TREE_CODE (src))
  {
  case PLUS_EXPR:
  case MINUS_EXPR:
4089 /* For now, just optimize the case of the topmost bitfield
4090 where we don't need to do any masking and also
4091 1 bit bitfields where xor can be used.
4092 We might win by one instruction for the other bitfields
4093 too if insv/extv instructions aren't used, so that
4094 can be added later. */
if (bitpos + bitsize != str_bitsize
    && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
  break;
4099 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4100 value = convert_modes (str_mode,
4101 TYPE_MODE (TREE_TYPE (op1)), value,
4102 TYPE_UNSIGNED (TREE_TYPE (op1)));
4104 /* We may be accessing data outside the field, which means
4105 we can alias adjacent data. */
4106 if (MEM_P (str_rtx))
4108 str_rtx = shallow_copy_rtx (str_rtx);
4109 set_mem_alias_set (str_rtx, 0);
4110 set_mem_expr (str_rtx, 0);
4113 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
if (bitsize == 1 && bitpos + bitsize != str_bitsize)
  {
    value = expand_and (str_mode, value, const1_rtx, NULL);
    binop = xor_optab;
  }
value = expand_shift (LSHIFT_EXPR, str_mode, value,
		      build_int_cst (NULL_TREE, bitpos),
		      NULL_RTX, 1);
4122 result = expand_binop (str_mode, binop, str_rtx,
4123 value, str_rtx, 1, OPTAB_WIDEN);
if (result != str_rtx)
  emit_move_insn (str_rtx, result);
return true;

case BIT_IOR_EXPR:
case BIT_XOR_EXPR:
  if (TREE_CODE (op1) != INTEGER_CST)
    break;
4132 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4133 value = convert_modes (GET_MODE (str_rtx),
4134 TYPE_MODE (TREE_TYPE (op1)), value,
4135 TYPE_UNSIGNED (TREE_TYPE (op1)));
4137 /* We may be accessing data outside the field, which means
4138 we can alias adjacent data. */
4139 if (MEM_P (str_rtx))
4141 str_rtx = shallow_copy_rtx (str_rtx);
4142 set_mem_alias_set (str_rtx, 0);
4143 set_mem_expr (str_rtx, 0);
4146 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4147 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4149 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4151 value = expand_and (GET_MODE (str_rtx), value, mask,
4154 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4155 build_int_cst (NULL_TREE, bitpos),
4157 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4158 value, str_rtx, 1, OPTAB_WIDEN);
4159 if (result != str_rtx)
4160 emit_move_insn (str_rtx, result);
4161 return true;
4163 default:
4164 break;
4165 }
4167 return false;
4168 }
4171 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4172 is true, try generating a nontemporal store. */
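/* Illustrative note (assumed example, not original text): for a
   source assignment such as

       p->s.f = x + 1;

   TO is the COMPONENT_REF p->s.f and FROM is the PLUS_EXPR; the
   component path below goes through get_inner_reference and
   store_field, while a plain "v = x + 1" falls through to
   store_expr at the end.  */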
4175 expand_assignment (tree to, tree from, bool nontemporal)
4180 /* Don't crash if the lhs of the assignment was erroneous. */
4181 if (TREE_CODE (to) == ERROR_MARK)
4183 result = expand_normal (from);
4187 /* Optimize away no-op moves without side-effects. */
4188 if (operand_equal_p (to, from, 0))
4191 /* Assignment of a structure component needs special treatment
4192 if the structure component's rtx is not simply a MEM.
4193 Assignment of an array element at a constant index, and assignment of
4194 an array element in an unaligned packed structure field, has the same
4195 problem.  */
4196 if (handled_component_p (to)
4197 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4199 enum machine_mode mode1;
4200 HOST_WIDE_INT bitsize, bitpos;
4207 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4208 &unsignedp, &volatilep, true);
4210 /* If we are going to use store_bit_field and extract_bit_field,
4211 make sure to_rtx will be safe for multiple use. */
4213 to_rtx = expand_normal (tem);
4219 if (!MEM_P (to_rtx))
4221 /* We can get constant negative offsets into arrays with broken
4222 user code. Translate this to a trap instead of ICEing. */
4223 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4224 expand_builtin_trap ();
4225 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4228 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4229 #ifdef POINTERS_EXTEND_UNSIGNED
4230 if (GET_MODE (offset_rtx) != Pmode)
4231 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4233 if (GET_MODE (offset_rtx) != ptr_mode)
4234 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4237 /* A constant address in TO_RTX can have VOIDmode; we must not try
4238 to call force_reg for that case, so avoid it.  */
4240 && GET_MODE (to_rtx) == BLKmode
4241 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4243 && (bitpos % bitsize) == 0
4244 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4245 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4247 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4251 to_rtx = offset_address (to_rtx, offset_rtx,
4252 highest_pow2_factor_for_target (to,
4256 /* Handle expand_expr of a complex value returning a CONCAT. */
4257 if (GET_CODE (to_rtx) == CONCAT)
4259 if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4261 gcc_assert (bitpos == 0);
4262 result = store_expr (from, to_rtx, false, nontemporal);
4266 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4267 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4275 /* If the field is at offset zero, we could have been given the
4276 DECL_RTX of the parent struct. Don't munge it. */
4277 to_rtx = shallow_copy_rtx (to_rtx);
4279 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4281 /* Deal with volatile and readonly fields. The former is only
4282 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4284 MEM_VOLATILE_P (to_rtx) = 1;
4285 if (component_uses_parent_alias_set (to))
4286 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4289 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4293 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4294 TREE_TYPE (tem), get_alias_set (to),
4299 preserve_temp_slots (result);
4305 else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
4307 addr_space_t as = ADDR_SPACE_GENERIC;
4308 enum machine_mode mode, op_mode1;
4309 enum insn_code icode;
4310 rtx reg, addr, mem, insn;
4312 if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0))))
4313 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4315 reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4316 reg = force_not_mem (reg);
4318 mode = TYPE_MODE (TREE_TYPE (to));
4319 addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
4321 addr = memory_address_addr_space (mode, addr, as);
4322 mem = gen_rtx_MEM (mode, addr);
4324 set_mem_attributes (mem, to, 0);
4325 set_mem_addr_space (mem, as);
4327 icode = movmisalign_optab->handlers[mode].insn_code;
4328 gcc_assert (icode != CODE_FOR_nothing);
4330 op_mode1 = insn_data[icode].operand[1].mode;
4331 if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4332 && op_mode1 != VOIDmode)
4333 reg = copy_to_mode_reg (op_mode1, reg);
4335 insn = GEN_FCN (icode) (mem, reg);
4336 emit_insn (insn);
4337 return;
4338 }
4340 /* If the rhs is a function call and its value is not an aggregate,
4341 call the function before we start to compute the lhs.
4342 This is needed for correct code for cases such as
4343 val = setjmp (buf) on machines where reference to val
4344 requires loading up part of an address in a separate insn.
4346 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4347 since it might be a promoted variable where the zero- or sign-extension
4348 needs to be done. Handling this in the normal way is safe because no
4349 computation is done before the call. The same is true for SSA names. */
4350 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4351 && COMPLETE_TYPE_P (TREE_TYPE (from))
4352 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4353 && ! (((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4354 && REG_P (DECL_RTL (to)))
4355 || TREE_CODE (to) == SSA_NAME))
4360 value = expand_normal (from);
4362 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4364 /* Handle calls that return values in multiple non-contiguous locations.
4365 The Irix 6 ABI has examples of this. */
4366 if (GET_CODE (to_rtx) == PARALLEL)
4367 emit_group_load (to_rtx, value, TREE_TYPE (from),
4368 int_size_in_bytes (TREE_TYPE (from)));
4369 else if (GET_MODE (to_rtx) == BLKmode)
4370 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4371 else
4372 {
4373 if (POINTER_TYPE_P (TREE_TYPE (to)))
4374 value = convert_memory_address (GET_MODE (to_rtx), value);
4375 emit_move_insn (to_rtx, value);
4377 preserve_temp_slots (to_rtx);
4383 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4384 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4387 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4389 /* Don't move directly into a return register. */
4390 if (TREE_CODE (to) == RESULT_DECL
4391 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4396 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4398 if (GET_CODE (to_rtx) == PARALLEL)
4399 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4400 int_size_in_bytes (TREE_TYPE (from)));
4401 else
4402 emit_move_insn (to_rtx, temp);
4404 preserve_temp_slots (to_rtx);
4410 /* In case we are returning the contents of an object which overlaps
4411 the place the value is being stored, use a safe function when copying
4412 a value through a pointer into a structure value return block. */
4413 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4414 && ADDR_SPACE_GENERIC_P
4415 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4416 && cfun->returns_struct
4417 && !cfun->returns_pcc_struct)
4422 size = expr_size (from);
4423 from_rtx = expand_normal (from);
4425 emit_library_call (memmove_libfunc, LCT_NORMAL,
4426 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4427 XEXP (from_rtx, 0), Pmode,
4428 convert_to_mode (TYPE_MODE (sizetype),
4429 size, TYPE_UNSIGNED (sizetype)),
4430 TYPE_MODE (sizetype));
4432 preserve_temp_slots (to_rtx);
4438 /* Compute FROM and store the value in the rtx we got. */
4441 result = store_expr (from, to_rtx, 0, nontemporal);
4442 preserve_temp_slots (result);
4448 /* Emits a nontemporal store insn that moves FROM to TO.  Returns true if this
4449 succeeded, false otherwise. */
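/* Illustrative note: a storent pattern, on targets that provide one
   (for instance an SSE2-style movnti store -- an assumption about
   one such target), writes around the cache.  store_expr tries this
   helper when its NONTEMPORAL argument is true and falls back to an
   ordinary move if no usable pattern exists.  */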
4452 emit_storent_insn (rtx to, rtx from)
4454 enum machine_mode mode = GET_MODE (to), imode;
4455 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4458 if (code == CODE_FOR_nothing)
4459 return false;
4461 imode = insn_data[code].operand[0].mode;
4462 if (!insn_data[code].operand[0].predicate (to, imode))
4463 return false;
4465 imode = insn_data[code].operand[1].mode;
4466 if (!insn_data[code].operand[1].predicate (from, imode))
4468 from = copy_to_mode_reg (imode, from);
4469 if (!insn_data[code].operand[1].predicate (from, imode))
4470 return false;
4471 }
4473 pattern = GEN_FCN (code) (to, from);
4474 if (pattern == NULL_RTX)
4475 return false;
4477 emit_insn (pattern);
4478 return true;
4479 }
4481 /* Generate code for computing expression EXP,
4482 and storing the value into TARGET.
4484 If the mode is BLKmode then we may return TARGET itself.
4485 It turns out that in BLKmode it doesn't cause a problem,
4486 because C has no operators that could combine two different
4487 assignments into the same BLKmode object with different values
4488 with no sequence point.  Will other languages need this to
4489 be more thorough?
4491 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4492 stack, and block moves may need to be treated specially.
4494 If NONTEMPORAL is true, try using a nontemporal store instruction. */
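/* Illustrative note (not original text): for a BLKmode target, a
   conditional source such as "t = c ? a : b" is expanded below as a
   test and two direct stores into TARGET, rather than materializing
   each arm in a temporary and copying it.  */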
4497 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4500 rtx alt_rtl = NULL_RTX;
4501 location_t loc = EXPR_LOCATION (exp);
4503 if (VOID_TYPE_P (TREE_TYPE (exp)))
4505 /* C++ can generate ?: expressions with a throw expression in one
4506 branch and an rvalue in the other. Here, we resolve attempts to
4507 store the throw expression's nonexistent result. */
4508 gcc_assert (!call_param_p);
4509 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4512 if (TREE_CODE (exp) == COMPOUND_EXPR)
4514 /* Perform first part of compound expression, then assign from second
4515 part.  */
4516 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4517 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4518 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4521 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4523 /* For a conditional expression, get a safe form of the target.  Then
4524 test the condition, doing the appropriate assignment on either
4525 side. This avoids the creation of unnecessary temporaries.
4526 For non-BLKmode, it is more efficient not to do this. */
4528 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4530 do_pending_stack_adjust ();
4532 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4533 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4535 emit_jump_insn (gen_jump (lab2));
4538 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4545 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4546 /* If this is a scalar in a register that is stored in a wider mode
4547 than the declared mode, compute the result into its declared mode
4548 and then convert to the wider mode.  Our value is the computed
4549 expression.  */
4551 rtx inner_target = 0;
4553 /* We can do the conversion inside EXP, which will often result
4554 in some optimizations. Do the conversion in two steps: first
4555 change the signedness, if needed, then the extension.  But don't
4556 do this if the type of EXP is a subtype of something else
4557 since then the conversion might involve more than just
4558 converting modes. */
4559 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4560 && TREE_TYPE (TREE_TYPE (exp)) == 0
4561 && GET_MODE_PRECISION (GET_MODE (target))
4562 == TYPE_PRECISION (TREE_TYPE (exp)))
4564 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4565 != SUBREG_PROMOTED_UNSIGNED_P (target))
4567 /* Some types, e.g. Fortran's logical*4, won't have a signed
4568 version, so use the mode instead. */
4570 = (signed_or_unsigned_type_for
4571 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4573 ntype = lang_hooks.types.type_for_mode
4574 (TYPE_MODE (TREE_TYPE (exp)),
4575 SUBREG_PROMOTED_UNSIGNED_P (target));
4577 exp = fold_convert_loc (loc, ntype, exp);
4580 exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4581 (GET_MODE (SUBREG_REG (target)),
4582 SUBREG_PROMOTED_UNSIGNED_P (target)),
4585 inner_target = SUBREG_REG (target);
4588 temp = expand_expr (exp, inner_target, VOIDmode,
4589 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4591 /* If TEMP is a VOIDmode constant, use convert_modes to make
4592 sure that we properly convert it. */
4593 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4595 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4596 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4597 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4598 GET_MODE (target), temp,
4599 SUBREG_PROMOTED_UNSIGNED_P (target));
4602 convert_move (SUBREG_REG (target), temp,
4603 SUBREG_PROMOTED_UNSIGNED_P (target));
4607 else if (TREE_CODE (exp) == STRING_CST
4608 && !nontemporal && !call_param_p
4609 && TREE_STRING_LENGTH (exp) > 0
4610 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4612 /* Optimize initialization of an array with a STRING_CST. */
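/* Illustrative example (assumption): for "char buf[16] = "abc";"
   the string bytes (rounded up for the by-pieces machinery) are
   stored directly and the remaining tail of the array is zeroed by
   the clear_storage call below.  */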
4613 HOST_WIDE_INT exp_len, str_copy_len;
4616 exp_len = int_expr_size (exp);
4620 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4621 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4624 str_copy_len = TREE_STRING_LENGTH (exp);
4625 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4627 str_copy_len += STORE_MAX_PIECES - 1;
4628 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4630 str_copy_len = MIN (str_copy_len, exp_len);
4631 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4632 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4633 MEM_ALIGN (target), false))
4638 dest_mem = store_by_pieces (dest_mem,
4639 str_copy_len, builtin_strncpy_read_str,
4640 CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4641 MEM_ALIGN (target), false,
4642 exp_len > str_copy_len ? 1 : 0);
4643 if (exp_len > str_copy_len)
4644 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4645 GEN_INT (exp_len - str_copy_len),
4654 /* If we want to use a nontemporal store, force the value into
4655 a register first.  */
4656 tmp_target = nontemporal ? NULL_RTX : target;
4657 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4659 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4663 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4664 the same as that of TARGET, adjust the constant. This is needed, for
4665 example, in case it is a CONST_DOUBLE and we want only a word-sized
4666 value.  */
4667 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4668 && TREE_CODE (exp) != ERROR_MARK
4669 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4670 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4671 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4673 /* If value was not generated in the target, store it there.
4674 Convert the value to TARGET's type first if necessary and emit the
4675 pending incrementations that have been queued when expanding EXP.
4676 Note that we cannot emit the whole queue blindly because this will
4677 effectively disable the POST_INC optimization later.
4679 If TEMP and TARGET compare equal according to rtx_equal_p, but
4680 one or both of them are volatile memory refs, we have to distinguish
4682 - expand_expr has used TARGET. In this case, we must not generate
4683 another copy.  This can be detected by TARGET being equal according
4684 to == .
4685 - expand_expr has not used TARGET - that means that the source just
4686 happens to have the same RTX form. Since temp will have been created
4687 by expand_expr, it will compare unequal according to == .
4688 We must generate a copy in this case, to reach the correct number
4689 of volatile memory references. */
4691 if ((! rtx_equal_p (temp, target)
4692 || (temp != target && (side_effects_p (temp)
4693 || side_effects_p (target))))
4694 && TREE_CODE (exp) != ERROR_MARK
4695 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4696 but TARGET is not a valid memory reference, TEMP will differ
4697 from TARGET although it is really the same location. */
4698 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4699 /* If there's nothing to copy, don't bother. Don't call
4700 expr_size unless necessary, because for some front ends (C++) the
4701 expr_size hook must not be given objects that are not
4702 supposed to be bit-copied or bit-initialized. */
4703 && expr_size (exp) != const0_rtx)
4705 if (GET_MODE (temp) != GET_MODE (target)
4706 && GET_MODE (temp) != VOIDmode)
4708 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4709 if (GET_MODE (target) == BLKmode
4710 || GET_MODE (temp) == BLKmode)
4711 emit_block_move (target, temp, expr_size (exp),
4713 ? BLOCK_OP_CALL_PARM
4714 : BLOCK_OP_NORMAL));
4716 convert_move (target, temp, unsignedp);
4719 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4721 /* Handle copying a string constant into an array. The string
4722 constant may be shorter than the array. So copy just the string's
4723 actual length, and clear the rest. First get the size of the data
4724 type of the string, which is actually the size of the target. */
4725 rtx size = expr_size (exp);
4727 if (CONST_INT_P (size)
4728 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4729 emit_block_move (target, temp, size,
4731 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4734 /* Compute the size of the data to copy from the string. */
4736 = size_binop_loc (loc, MIN_EXPR,
4737 make_tree (sizetype, size),
4738 size_int (TREE_STRING_LENGTH (exp)));
4740 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4742 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4745 /* Copy that much. */
4746 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4747 TYPE_UNSIGNED (sizetype));
4748 emit_block_move (target, temp, copy_size_rtx,
4750 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4752 /* Figure out how much is left in TARGET that we have to clear.
4753 Do all calculations in ptr_mode. */
4754 if (CONST_INT_P (copy_size_rtx))
4756 size = plus_constant (size, -INTVAL (copy_size_rtx));
4757 target = adjust_address (target, BLKmode,
4758 INTVAL (copy_size_rtx));
4762 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4763 copy_size_rtx, NULL_RTX, 0,
4766 #ifdef POINTERS_EXTEND_UNSIGNED
4767 if (GET_MODE (copy_size_rtx) != Pmode)
4768 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4769 TYPE_UNSIGNED (sizetype));
4772 target = offset_address (target, copy_size_rtx,
4773 highest_pow2_factor (copy_size));
4774 label = gen_label_rtx ();
4775 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4776 GET_MODE (size), 0, label);
4779 if (size != const0_rtx)
4780 clear_storage (target, size, BLOCK_OP_NORMAL);
4786 /* Handle calls that return values in multiple non-contiguous locations.
4787 The Irix 6 ABI has examples of this. */
4788 else if (GET_CODE (target) == PARALLEL)
4789 emit_group_load (target, temp, TREE_TYPE (exp),
4790 int_size_in_bytes (TREE_TYPE (exp)));
4791 else if (GET_MODE (temp) == BLKmode)
4792 emit_block_move (target, temp, expr_size (exp),
4794 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4795 else if (nontemporal
4796 && emit_storent_insn (target, temp))
4797 /* If we managed to emit a nontemporal store, there is nothing else to
4798 do.  */
4799 ;
4800 else
4801 {
4802 temp = force_operand (temp, target);
4803 if (temp != target)
4804 emit_move_insn (target, temp);
4811 /* Helper for categorize_ctor_elements. Identical interface. */
4814 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4815 HOST_WIDE_INT *p_elt_count,
4818 unsigned HOST_WIDE_INT idx;
4819 HOST_WIDE_INT nz_elts, elt_count;
4820 tree value, purpose;
4822 /* Whether CTOR is a valid constant initializer, in accordance with what
4823 initializer_constant_valid_p does. If inferred from the constructor
4824 elements, true until proven otherwise. */
4825 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4826 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4831 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4836 if (TREE_CODE (purpose) == RANGE_EXPR)
4838 tree lo_index = TREE_OPERAND (purpose, 0);
4839 tree hi_index = TREE_OPERAND (purpose, 1);
4841 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4842 mult = (tree_low_cst (hi_index, 1)
4843 - tree_low_cst (lo_index, 1) + 1);
4846 switch (TREE_CODE (value))
4850 HOST_WIDE_INT nz = 0, ic = 0;
4853 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4855 nz_elts += mult * nz;
4856 elt_count += mult * ic;
4858 if (const_from_elts_p && const_p)
4859 const_p = const_elt_p;
4866 if (!initializer_zerop (value))
4872 nz_elts += mult * TREE_STRING_LENGTH (value);
4873 elt_count += mult * TREE_STRING_LENGTH (value);
4877 if (!initializer_zerop (TREE_REALPART (value)))
4879 if (!initializer_zerop (TREE_IMAGPART (value)))
4887 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4889 if (!initializer_zerop (TREE_VALUE (v)))
4900 if (const_from_elts_p && const_p)
4901 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4908 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4909 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4912 bool clear_this = true;
4914 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4916 /* We don't expect more than one element of the union to be
4917 initialized. Not sure what we should do otherwise... */
4918 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4921 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4922 CONSTRUCTOR_ELTS (ctor),
4925 /* ??? We could look at each element of the union, and find the
4926 largest element, which would avoid comparing the size of the
4927 initialized element against any tail padding in the union.
4928 Doesn't seem worth the effort... */
4929 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4930 TYPE_SIZE (init_sub_type)) == 1)
4932 /* And now we have to find out if the element itself is fully
4933 constructed. E.g. for union { struct { int a, b; } s; } u
4934 = { .s = { .a = 1 } }. */
4935 if (elt_count == count_type_elements (init_sub_type, false))
4940 *p_must_clear = clear_this;
4943 *p_nz_elts += nz_elts;
4944 *p_elt_count += elt_count;
4949 /* Examine CTOR to discover:
4950 * how many scalar fields are set to nonzero values,
4951 and place it in *P_NZ_ELTS;
4952 * how many scalar fields in total are in CTOR,
4953 and place it in *P_ELT_COUNT.
4954 * if a type is a union, and the initializer from the constructor
4955 is not the largest element in the union, then set *P_MUST_CLEAR.
4957 Return whether or not CTOR is a valid static constant initializer, the same
4958 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4961 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4962 HOST_WIDE_INT *p_elt_count,
4967 *p_must_clear = false;
4970 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4973 /* Count the number of scalars in TYPE.  Return -1 on overflow or
4974 if the type is variable-sized.  If ALLOW_FLEXARR is true, don't count
4975 a flexible array member at the end of the structure.  */
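/* Illustrative example: under these rules count_type_elements
   returns 4 for int[4], 2 for struct { int x; double y; }, and -1
   for a variable-length array type.  */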
4978 count_type_elements (const_tree type, bool allow_flexarr)
4980 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4981 switch (TREE_CODE (type))
4985 tree telts = array_type_nelts (type);
4986 if (telts && host_integerp (telts, 1))
4988 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4989 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4992 else if (max / n > m)
5000 HOST_WIDE_INT n = 0, t;
5003 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5004 if (TREE_CODE (f) == FIELD_DECL)
5006 t = count_type_elements (TREE_TYPE (f), false);
5009 /* Check for structures with flexible array member. */
5010 tree tf = TREE_TYPE (f);
5012 && TREE_CHAIN (f) == NULL
5013 && TREE_CODE (tf) == ARRAY_TYPE
5015 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5016 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5017 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5018 && int_size_in_bytes (type) >= 0)
5030 case QUAL_UNION_TYPE:
5037 return TYPE_VECTOR_SUBPARTS (type);
5041 case FIXED_POINT_TYPE:
5046 case REFERENCE_TYPE:
5061 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
5064 mostly_zeros_p (const_tree exp)
5066 if (TREE_CODE (exp) == CONSTRUCTOR)
5069 HOST_WIDE_INT nz_elts, count, elts;
5072 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5076 elts = count_type_elements (TREE_TYPE (exp), false);
5078 return nz_elts < elts / 4;
5081 return initializer_zerop (exp);
5084 /* Return 1 if EXP contains all zeros. */
5087 all_zeros_p (const_tree exp)
5089 if (TREE_CODE (exp) == CONSTRUCTOR)
5092 HOST_WIDE_INT nz_elts, count;
5095 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5096 return nz_elts == 0;
5099 return initializer_zerop (exp);
5102 /* Helper function for store_constructor.
5103 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5104 TYPE is the type of the CONSTRUCTOR, not the element type.
5105 CLEARED is as for store_constructor.
5106 ALIAS_SET is the alias set to use for any stores.
5108 This provides a recursive shortcut back to store_constructor when it isn't
5109 necessary to go through store_field. This is so that we can pass through
5110 the cleared field to let store_constructor know that we may not have to
5111 clear a substructure if the outer structure has already been cleared. */
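/* Illustrative note: for a nested initializer such as

       struct { struct { int x, y; } in; } v = { { 1, 2 } };

   the inner CONSTRUCTOR is stored by recursing directly into
   store_constructor, with CLEARED telling the recursion whether an
   outer clear already zeroed this region.  */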
5114 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5115 HOST_WIDE_INT bitpos, enum machine_mode mode,
5116 tree exp, tree type, int cleared,
5117 alias_set_type alias_set)
5119 if (TREE_CODE (exp) == CONSTRUCTOR
5120 /* We can only call store_constructor recursively if the size and
5121 bit position are on a byte boundary. */
5122 && bitpos % BITS_PER_UNIT == 0
5123 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5124 /* If we have a nonzero bitpos for a register target, then we just
5125 let store_field do the bitfield handling. This is unlikely to
5126 generate unnecessary clear instructions anyways. */
5127 && (bitpos == 0 || MEM_P (target)))
5131 = adjust_address (target,
5132 GET_MODE (target) == BLKmode
5134 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5135 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5138 /* Update the alias set, if required. */
5139 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5140 && MEM_ALIAS_SET (target) != 0)
5142 target = copy_rtx (target);
5143 set_mem_alias_set (target, alias_set);
5146 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5149 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5152 /* Store the value of constructor EXP into the rtx TARGET.
5153 TARGET is either a REG or a MEM; we know it cannot conflict, since
5154 safe_from_p has been called.
5155 CLEARED is true if TARGET is known to have been zero'd.
5156 SIZE is the number of bytes of TARGET we are allowed to modify: this
5157 may not be the same as the size of EXP if we are assigning to a field
5158 which has been packed to exclude padding bits. */
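/* Illustrative example (assumption): for "int v[8] = { [7] = 5 };"
   mostly_zeros_p holds, so the code below clears the whole array
   with clear_storage and then stores only the one nonzero
   element.  */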
5161 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5163 tree type = TREE_TYPE (exp);
5164 #ifdef WORD_REGISTER_OPERATIONS
5165 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5168 switch (TREE_CODE (type))
5172 case QUAL_UNION_TYPE:
5174 unsigned HOST_WIDE_INT idx;
5177 /* If size is zero or the target is already cleared, do nothing. */
5178 if (size == 0 || cleared)
5180 /* We either clear the aggregate or indicate the value is dead. */
5181 else if ((TREE_CODE (type) == UNION_TYPE
5182 || TREE_CODE (type) == QUAL_UNION_TYPE)
5183 && ! CONSTRUCTOR_ELTS (exp))
5184 /* If the constructor is empty, clear the union. */
5186 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5190 /* If we are building a static constructor into a register,
5191 set the initial value as zero so we can fold the value into
5192 a constant. But if more than one register is involved,
5193 this probably loses. */
5194 else if (REG_P (target) && TREE_STATIC (exp)
5195 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5197 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5201 /* If the constructor has fewer fields than the structure or
5202 if we are initializing the structure to mostly zeros, clear
5203 the whole structure first. Don't do this if TARGET is a
5204 register whose mode size isn't equal to SIZE since
5205 clear_storage can't handle this case. */
5207 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5208 != fields_length (type))
5209 || mostly_zeros_p (exp))
5211 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5214 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5218 if (REG_P (target) && !cleared)
5219 emit_clobber (target);
5221 /* Store each element of the constructor into the
5222 corresponding field of TARGET. */
5223 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5225 enum machine_mode mode;
5226 HOST_WIDE_INT bitsize;
5227 HOST_WIDE_INT bitpos = 0;
5229 rtx to_rtx = target;
5231 /* Just ignore missing fields. We cleared the whole
5232 structure, above, if any fields are missing. */
5236 if (cleared && initializer_zerop (value))
5239 if (host_integerp (DECL_SIZE (field), 1))
5240 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5244 mode = DECL_MODE (field);
5245 if (DECL_BIT_FIELD (field))
5248 offset = DECL_FIELD_OFFSET (field);
5249 if (host_integerp (offset, 0)
5250 && host_integerp (bit_position (field), 0))
5252 bitpos = int_bit_position (field);
5256 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5263 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5264 make_tree (TREE_TYPE (exp),
5267 offset_rtx = expand_normal (offset);
5268 gcc_assert (MEM_P (to_rtx));
5270 #ifdef POINTERS_EXTEND_UNSIGNED
5271 if (GET_MODE (offset_rtx) != Pmode)
5272 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5274 if (GET_MODE (offset_rtx) != ptr_mode)
5275 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5278 to_rtx = offset_address (to_rtx, offset_rtx,
5279 highest_pow2_factor (offset));
5282 #ifdef WORD_REGISTER_OPERATIONS
5283 /* If this initializes a field that is smaller than a
5284 word, at the start of a word, try to widen it to a full
5285 word. This special case allows us to output C++ member
5286 function initializations in a form that the optimizers
5287 can understand.  */
5289 && bitsize < BITS_PER_WORD
5290 && bitpos % BITS_PER_WORD == 0
5291 && GET_MODE_CLASS (mode) == MODE_INT
5292 && TREE_CODE (value) == INTEGER_CST
5294 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5296 tree type = TREE_TYPE (value);
5298 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5300 type = lang_hooks.types.type_for_size
5301 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5302 value = fold_convert (type, value);
5305 if (BYTES_BIG_ENDIAN)
5307 = fold_build2 (LSHIFT_EXPR, type, value,
5308 build_int_cst (type,
5309 BITS_PER_WORD - bitsize));
5310 bitsize = BITS_PER_WORD;
5315 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5316 && DECL_NONADDRESSABLE_P (field))
5318 to_rtx = copy_rtx (to_rtx);
5319 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5322 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5323 value, type, cleared,
5324 get_alias_set (TREE_TYPE (field)));
5331 unsigned HOST_WIDE_INT i;
5334 tree elttype = TREE_TYPE (type);
5336 HOST_WIDE_INT minelt = 0;
5337 HOST_WIDE_INT maxelt = 0;
5339 domain = TYPE_DOMAIN (type);
5340 const_bounds_p = (TYPE_MIN_VALUE (domain)
5341 && TYPE_MAX_VALUE (domain)
5342 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5343 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5345 /* If we have constant bounds for the range of the type, get them. */
5348 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5349 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5352 /* If the constructor has fewer elements than the array, clear
5353 the whole array first.  Similarly if this is a static
5354 constructor of a non-BLKmode object.  */
5357 else if (REG_P (target) && TREE_STATIC (exp))
5361 unsigned HOST_WIDE_INT idx;
5363 HOST_WIDE_INT count = 0, zero_count = 0;
5364 need_to_clear = ! const_bounds_p;
5366 /* This loop is a more accurate version of the loop in
5367 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5368 is also needed to check for missing elements. */
5369 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5371 HOST_WIDE_INT this_node_count;
5376 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5378 tree lo_index = TREE_OPERAND (index, 0);
5379 tree hi_index = TREE_OPERAND (index, 1);
5381 if (! host_integerp (lo_index, 1)
5382 || ! host_integerp (hi_index, 1))
5388 this_node_count = (tree_low_cst (hi_index, 1)
5389 - tree_low_cst (lo_index, 1) + 1);
5392 this_node_count = 1;
5394 count += this_node_count;
5395 if (mostly_zeros_p (value))
5396 zero_count += this_node_count;
5399 /* Clear the entire array first if there are any missing
5400 elements, or if the incidence of zero elements is >=
5401 75%.  */
5402 if (! need_to_clear
5403 && (count < maxelt - minelt + 1
5404 || 4 * zero_count >= 3 * count))
5405 need_to_clear = true;
5406 }
5408 if (need_to_clear && size > 0)
5411 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5413 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5417 if (!cleared && REG_P (target))
5418 /* Inform later passes that the old value is dead. */
5419 emit_clobber (target);
5421 /* Store each element of the constructor into the
5422 corresponding element of TARGET, determined by counting the
5423 elements.  */
5424 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5426 enum machine_mode mode;
5427 HOST_WIDE_INT bitsize;
5428 HOST_WIDE_INT bitpos;
5429 rtx xtarget = target;
5431 if (cleared && initializer_zerop (value))
5434 mode = TYPE_MODE (elttype);
5435 if (mode == BLKmode)
5436 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5437 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5440 bitsize = GET_MODE_BITSIZE (mode);
5442 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5444 tree lo_index = TREE_OPERAND (index, 0);
5445 tree hi_index = TREE_OPERAND (index, 1);
5446 rtx index_r, pos_rtx;
5447 HOST_WIDE_INT lo, hi, count;
5450 /* If the range is constant and "small", unroll the loop. */
5452 && host_integerp (lo_index, 0)
5453 && host_integerp (hi_index, 0)
5454 && (lo = tree_low_cst (lo_index, 0),
5455 hi = tree_low_cst (hi_index, 0),
5456 count = hi - lo + 1,
5459 || (host_integerp (TYPE_SIZE (elttype), 1)
5460 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5463 lo -= minelt; hi -= minelt;
5464 for (; lo <= hi; lo++)
5466 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5469 && !MEM_KEEP_ALIAS_SET_P (target)
5470 && TREE_CODE (type) == ARRAY_TYPE
5471 && TYPE_NONALIASED_COMPONENT (type))
5473 target = copy_rtx (target);
5474 MEM_KEEP_ALIAS_SET_P (target) = 1;
5477 store_constructor_field
5478 (target, bitsize, bitpos, mode, value, type, cleared,
5479 get_alias_set (elttype));
5484 rtx loop_start = gen_label_rtx ();
5485 rtx loop_end = gen_label_rtx ();
5488 expand_normal (hi_index);
5490 index = build_decl (EXPR_LOCATION (exp),
5491 VAR_DECL, NULL_TREE, domain);
5492 index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5493 SET_DECL_RTL (index, index_r);
5494 store_expr (lo_index, index_r, 0, false);
5496 /* Build the head of the loop. */
5497 do_pending_stack_adjust ();
5498 emit_label (loop_start);
5500 /* Assign value to element index. */
5502 fold_convert (ssizetype,
5503 fold_build2 (MINUS_EXPR,
5506 TYPE_MIN_VALUE (domain)));
5509 size_binop (MULT_EXPR, position,
5510 fold_convert (ssizetype,
5511 TYPE_SIZE_UNIT (elttype)));
5513 pos_rtx = expand_normal (position);
5514 xtarget = offset_address (target, pos_rtx,
5515 highest_pow2_factor (position));
5516 xtarget = adjust_address (xtarget, mode, 0);
5517 if (TREE_CODE (value) == CONSTRUCTOR)
5518 store_constructor (value, xtarget, cleared,
5519 bitsize / BITS_PER_UNIT);
5521 store_expr (value, xtarget, 0, false);
5523 /* Generate a conditional jump to exit the loop. */
5524 exit_cond = build2 (LT_EXPR, integer_type_node,
5526 jumpif (exit_cond, loop_end);
5528 /* Update the loop counter, and jump to the head of
5529 the loop.  */
5530 expand_assignment (index,
5531 build2 (PLUS_EXPR, TREE_TYPE (index),
5532 index, integer_one_node),
5535 emit_jump (loop_start);
5537 /* Build the end of the loop. */
5538 emit_label (loop_end);
5541 else if ((index != 0 && ! host_integerp (index, 0))
5542 || ! host_integerp (TYPE_SIZE (elttype), 1))
5547 index = ssize_int (1);
5550 index = fold_convert (ssizetype,
5551 fold_build2 (MINUS_EXPR,
5554 TYPE_MIN_VALUE (domain)));
5557 size_binop (MULT_EXPR, index,
5558 fold_convert (ssizetype,
5559 TYPE_SIZE_UNIT (elttype)));
5560 xtarget = offset_address (target,
5561 expand_normal (position),
5562 highest_pow2_factor (position));
5563 xtarget = adjust_address (xtarget, mode, 0);
5564 store_expr (value, xtarget, 0, false);
5569 bitpos = ((tree_low_cst (index, 0) - minelt)
5570 * tree_low_cst (TYPE_SIZE (elttype), 1));
5572 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5574 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5575 && TREE_CODE (type) == ARRAY_TYPE
5576 && TYPE_NONALIASED_COMPONENT (type))
5578 target = copy_rtx (target);
5579 MEM_KEEP_ALIAS_SET_P (target) = 1;
5581 store_constructor_field (target, bitsize, bitpos, mode, value,
5582 type, cleared, get_alias_set (elttype));
5590 unsigned HOST_WIDE_INT idx;
5591 constructor_elt *ce;
5595 tree elttype = TREE_TYPE (type);
5596 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5597 enum machine_mode eltmode = TYPE_MODE (elttype);
5598 HOST_WIDE_INT bitsize;
5599 HOST_WIDE_INT bitpos;
5600 rtvec vector = NULL;
5602 alias_set_type alias;
5604 gcc_assert (eltmode != BLKmode);
5606 n_elts = TYPE_VECTOR_SUBPARTS (type);
5607 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5609 enum machine_mode mode = GET_MODE (target);
5611 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5612 if (icode != CODE_FOR_nothing)
5616 vector = rtvec_alloc (n_elts);
5617 for (i = 0; i < n_elts; i++)
5618 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5622 /* If the constructor has fewer elements than the vector,
5623 clear the whole vector first.  Similarly if this is a static
5624 constructor of a non-BLKmode object.  */
5627 else if (REG_P (target) && TREE_STATIC (exp))
5631 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5634 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5636 int n_elts_here = tree_low_cst
5637 (int_const_binop (TRUNC_DIV_EXPR,
5638 TYPE_SIZE (TREE_TYPE (value)),
5639 TYPE_SIZE (elttype), 0), 1);
5641 count += n_elts_here;
5642 if (mostly_zeros_p (value))
5643 zero_count += n_elts_here;
5646 /* Clear the entire vector first if there are any missing elements,
5647 or if the incidence of zero elements is >= 75%. */
5648 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5651 if (need_to_clear && size > 0 && !vector)
5654 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5656 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5660 /* Inform later passes that the old value is dead. */
5661 if (!cleared && !vector && REG_P (target))
5662 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5665 alias = MEM_ALIAS_SET (target);
5667 alias = get_alias_set (elttype);
5669 /* Store each element of the constructor into the corresponding
5670 element of TARGET, determined by counting the elements. */
5671 for (idx = 0, i = 0;
5672 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5673 idx++, i += bitsize / elt_size)
5675 HOST_WIDE_INT eltpos;
5676 tree value = ce->value;
5678 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5679 if (cleared && initializer_zerop (value))
5683 eltpos = tree_low_cst (ce->index, 1);
5689 /* Vector CONSTRUCTORs should only be built from smaller
5690 vectors in the case of BLKmode vectors. */
5691 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5692 RTVEC_ELT (vector, eltpos)
5693 = expand_normal (value);
5697 enum machine_mode value_mode =
5698 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5699 ? TYPE_MODE (TREE_TYPE (value))
5701 bitpos = eltpos * elt_size;
5702 store_constructor_field (target, bitsize, bitpos,
5703 value_mode, value, type,
5709 emit_insn (GEN_FCN (icode)
5711 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5720 /* Store the value of EXP (an expression tree)
5721 into a subfield of TARGET which has mode MODE and occupies
5722 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5723 If MODE is VOIDmode, it means that we are storing into a bit-field.
5725 Always return const0_rtx unless we have something particular to
5726 return.
5728 TYPE is the type of the underlying object,
5730 ALIAS_SET is the alias set for the destination. This value will
5731 (in general) be different from that for TARGET, since TARGET is a
5732 reference to the containing structure.
5734 If NONTEMPORAL is true, try generating a nontemporal store. */
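/* Illustrative note (not original text): for "s.f = x" with

       struct S { int pad : 3; int f : 5; } s;

   store_field is reached with BITSIZE == 5, BITPOS == 3 and MODE ==
   VOIDmode, and the value is inserted via store_bit_field below.  */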
5737 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5738 enum machine_mode mode, tree exp, tree type,
5739 alias_set_type alias_set, bool nontemporal)
5741 HOST_WIDE_INT width_mask = 0;
5743 if (TREE_CODE (exp) == ERROR_MARK)
5746 /* If we have nothing to store, do nothing unless the expression has
5747 side-effects.  */
5748 if (bitsize == 0)
5749 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5750 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5751 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5753 /* If we are storing into an unaligned field of an aligned union that is
5754 in a register, we may have the mode of TARGET being an integer mode but
5755 MODE == BLKmode. In that case, get an aligned object whose size and
5756 alignment are the same as TARGET and store TARGET into it (we can avoid
5757 the store if the field being stored is the entire width of TARGET). Then
5758 call ourselves recursively to store the field into a BLKmode version of
5759 that object. Finally, load from the object into TARGET. This is not
5760 very efficient in general, but should only be slightly more expensive
5761 than the otherwise-required unaligned accesses. Perhaps this can be
5762 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5763 twice, once with emit_move_insn and once via store_field. */
5766 && (REG_P (target) || GET_CODE (target) == SUBREG))
5768 rtx object = assign_temp (type, 0, 1, 1);
5769 rtx blk_object = adjust_address (object, BLKmode, 0);
5771 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5772 emit_move_insn (object, target);
5774 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5777 emit_move_insn (target, object);
5779 /* We want to return the BLKmode version of the data. */
5783 if (GET_CODE (target) == CONCAT)
5785 /* We're storing into a struct containing a single __complex. */
5787 gcc_assert (!bitpos);
5788 return store_expr (exp, target, 0, nontemporal);
5791 /* If the structure is in a register or if the component
5792 is a bit field, we cannot use addressing to access it.
5793 Use bit-field techniques or SUBREG to store in it. */
5795 if (mode == VOIDmode
5796 || (mode != BLKmode && ! direct_store[(int) mode]
5797 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5798 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5800 || GET_CODE (target) == SUBREG
5801 /* If the field isn't aligned enough to store as an ordinary memref,
5802 store it as a bit field. */
5804 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5805 || bitpos % GET_MODE_ALIGNMENT (mode))
5806 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5807 || (bitpos % BITS_PER_UNIT != 0)))
5808 /* If the RHS and field are a constant size and the size of the
5809 RHS isn't the same size as the bitfield, we must use bitfield
5810 operations.  */
5811 || (bitsize >= 0
5812 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5813 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5818 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5819 implies a mask operation. If the precision is the same size as
5820 the field we're storing into, that mask is redundant. This is
5821 particularly common with bit field assignments generated by the
5822 C++ front end.  */
5823 nop_def = get_def_for_expr (exp, NOP_EXPR);
5826 tree type = TREE_TYPE (exp);
5827 if (INTEGRAL_TYPE_P (type)
5828 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5829 && bitsize == TYPE_PRECISION (type))
5831 tree op = gimple_assign_rhs1 (nop_def);
5832 type = TREE_TYPE (op);
5833 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5838 temp = expand_normal (exp);
5840 /* If BITSIZE is narrower than the size of the type of EXP
5841 we will be narrowing TEMP. Normally, what's wanted are the
5842 low-order bits.  However, if EXP's type is a record and this is a
5843 big-endian machine, we want the upper BITSIZE bits.  */
5844 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5845 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5846 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5847 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5848 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5852 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5853 MODE.  */
5854 if (mode != VOIDmode && mode != BLKmode
5855 && mode != TYPE_MODE (TREE_TYPE (exp)))
5856 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5858 /* If the modes of TEMP and TARGET are both BLKmode, both
5859 must be in memory and BITPOS must be aligned on a byte
5860 boundary. If so, we simply do a block copy. Likewise
5861 for a BLKmode-like TARGET. */
5862 if (GET_MODE (temp) == BLKmode
5863 && (GET_MODE (target) == BLKmode
5865 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5866 && (bitpos % BITS_PER_UNIT) == 0
5867 && (bitsize % BITS_PER_UNIT) == 0)))
5869 gcc_assert (MEM_P (target) && MEM_P (temp)
5870 && (bitpos % BITS_PER_UNIT) == 0);
5872 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5873 emit_block_move (target, temp,
5874 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5881 /* Store the value in the bitfield. */
5882 store_bit_field (target, bitsize, bitpos, mode, temp);
5888 /* Now build a reference to just the desired component. */
5889 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5891 if (to_rtx == target)
5892 to_rtx = copy_rtx (to_rtx);
5894 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5895 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5896 set_mem_alias_set (to_rtx, alias_set);
5898 return store_expr (exp, to_rtx, 0, nontemporal);
5902 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5903 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5904 codes and find the ultimate containing object, which we return.
5906 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5907 bit position, and *PUNSIGNEDP to the signedness of the field.
5908 If the position of the field is variable, we store a tree
5909 giving the variable offset (in units) in *POFFSET.
5910 This offset is in addition to the bit position.
5911 If the position is not variable, we store 0 in *POFFSET.
5913 If any of the extraction expressions is volatile,
5914 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5916 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5917 Otherwise, it is a mode that can be used to access the field.
5919 If the field describes a variable-sized object, *PMODE is set to
5920 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5921 this case, but the address of the object can be found.
5923 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5924 look through nodes that serve as markers of a greater alignment than
5925 the one that can be deduced from the expression. These nodes make it
5926 possible for front-ends to prevent temporaries from being created by
5927 the middle-end on alignment considerations. For that purpose, the
5928 normal operating mode at high-level is to always pass FALSE so that
5929 the ultimate containing object is really returned; moreover, the
5930 associated predicate handled_component_p will always return TRUE
5931 on these nodes, thus indicating that they are essentially handled
5932 by get_inner_reference. TRUE should only be passed when the caller
5933 is scanning the expression in order to build another representation
5934 and specifically knows how to handle these nodes; as such, this is
5935 the normal operating mode in the RTL expanders. */
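/* Illustrative example (assumed): for EXP == a.b[i].c this returns
   the innermost object "a", accumulating the constant part of the
   position in *PBITPOS and leaving the i-dependent part as a byte
   offset tree in *POFFSET.  */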
5938 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5939 HOST_WIDE_INT *pbitpos, tree *poffset,
5940 enum machine_mode *pmode, int *punsignedp,
5941 int *pvolatilep, bool keep_aligning)
5944 enum machine_mode mode = VOIDmode;
5945 bool blkmode_bitfield = false;
5946 tree offset = size_zero_node;
5947 tree bit_offset = bitsize_zero_node;
5949 /* First get the mode, signedness, and size. We do this from just the
5950 outermost expression. */
5951 if (TREE_CODE (exp) == COMPONENT_REF)
5953 tree field = TREE_OPERAND (exp, 1);
5954 size_tree = DECL_SIZE (field);
5955 if (!DECL_BIT_FIELD (field))
5956 mode = DECL_MODE (field);
5957 else if (DECL_MODE (field) == BLKmode)
5958 blkmode_bitfield = true;
5960 *punsignedp = DECL_UNSIGNED (field);
5962 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5964 size_tree = TREE_OPERAND (exp, 1);
5965 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5966 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5968 /* For vector types, with the correct size of access, use the mode of
5969 the inner type.  */
5970 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5971 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5972 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5973 mode = TYPE_MODE (TREE_TYPE (exp));
5977 mode = TYPE_MODE (TREE_TYPE (exp));
5978 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5980 if (mode == BLKmode)
5981 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5983 *pbitsize = GET_MODE_BITSIZE (mode);
5988 if (! host_integerp (size_tree, 1))
5989 mode = BLKmode, *pbitsize = -1;
5991 *pbitsize = tree_low_cst (size_tree, 1);
5994 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5995 and find the ultimate containing object. */
5998 switch (TREE_CODE (exp))
6001 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6002 TREE_OPERAND (exp, 2));
6007 tree field = TREE_OPERAND (exp, 1);
6008 tree this_offset = component_ref_field_offset (exp);
6010 /* If this field hasn't been filled in yet, don't go past it.
6011 This should only happen when folding expressions made during
6012 type construction. */
6013 if (this_offset == 0)
6016 offset = size_binop (PLUS_EXPR, offset, this_offset);
6017 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6018 DECL_FIELD_BIT_OFFSET (field));
6020 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
6025 case ARRAY_RANGE_REF:
6027 tree index = TREE_OPERAND (exp, 1);
6028 tree low_bound = array_ref_low_bound (exp);
6029 tree unit_size = array_ref_element_size (exp);
6031 /* We assume all arrays have sizes that are a multiple of a byte.
6032 First subtract the lower bound, if any, in the type of the
6033 index, then convert to sizetype and multiply by the size of
6034 the array element. */
6035 if (! integer_zerop (low_bound))
6036 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6039 offset = size_binop (PLUS_EXPR, offset,
6040 size_binop (MULT_EXPR,
6041 fold_convert (sizetype, index),
6050 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6051 bitsize_int (*pbitsize));
6054 case VIEW_CONVERT_EXPR:
6055 if (keep_aligning && STRICT_ALIGNMENT
6056 && (TYPE_ALIGN (TREE_TYPE (exp))
6057 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6058 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6059 < BIGGEST_ALIGNMENT)
6060 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6061 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6069 /* If any reference in the chain is volatile, the effect is volatile. */
6070 if (TREE_THIS_VOLATILE (exp))
6073 exp = TREE_OPERAND (exp, 0);
6077 /* If OFFSET is constant, see if we can return the whole thing as a
6078 constant bit position.  Make sure to handle overflow during
6079 this conversion.  */
6080 if (host_integerp (offset, 0))
6082 double_int tem = double_int_mul (tree_to_double_int (offset),
6083 uhwi_to_double_int (BITS_PER_UNIT));
6084 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6085 if (double_int_fits_in_shwi_p (tem))
6087 *pbitpos = double_int_to_shwi (tem);
6088 *poffset = offset = NULL_TREE;
6092 /* Otherwise, split it up. */
6095 *pbitpos = tree_low_cst (bit_offset, 0);
6099 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6100 if (mode == VOIDmode
6102 && (*pbitpos % BITS_PER_UNIT) == 0
6103 && (*pbitsize % BITS_PER_UNIT) == 0)
6111 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6112 ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6113 EXP are marked as PACKED.  */
6116 contains_packed_reference (const_tree exp)
6118 bool packed_p = false;
6122 switch (TREE_CODE (exp))
6126 tree field = TREE_OPERAND (exp, 1);
6127 packed_p = DECL_PACKED (field)
6128 || TYPE_PACKED (TREE_TYPE (field))
6129 || TYPE_PACKED (TREE_TYPE (exp));
6137 case ARRAY_RANGE_REF:
6140 case VIEW_CONVERT_EXPR:
6146 exp = TREE_OPERAND (exp, 0);
6152 /* Return a tree of sizetype representing the size, in bytes, of the element
6153 of EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6156 array_ref_element_size (tree exp)
6158 tree aligned_size = TREE_OPERAND (exp, 3);
6159 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6160 location_t loc = EXPR_LOCATION (exp);
6162 /* If a size was specified in the ARRAY_REF, it's the size measured
6163 in alignment units of the element type. So multiply by that value. */
6166 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6167 sizetype from another type of the same width and signedness. */
6168 if (TREE_TYPE (aligned_size) != sizetype)
6169 aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6170 return size_binop_loc (loc, MULT_EXPR, aligned_size,
6171 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6174 /* Otherwise, take the size from that of the element type. Substitute
6175 any PLACEHOLDER_EXPR that we have. */
6177 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6180 /* Return a tree representing the lower bound of the array mentioned in
6181 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6184 array_ref_low_bound (tree exp)
6186 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6188 /* If a lower bound is specified in EXP, use it. */
6189 if (TREE_OPERAND (exp, 2))
6190 return TREE_OPERAND (exp, 2);
6192 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6193 substituting for a PLACEHOLDER_EXPR as needed. */
6194 if (domain_type && TYPE_MIN_VALUE (domain_type))
6195 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6197 /* Otherwise, return a zero of the appropriate type. */
6198 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6201 /* Return a tree representing the upper bound of the array mentioned in
6202 EXP, an ARRAY_REF or an ARRAY_RANGE_REF. */
6205 array_ref_up_bound (tree exp)
6207 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6209 /* If there is a domain type and it has an upper bound, use it, substituting
6210 for a PLACEHOLDER_EXPR as needed. */
6211 if (domain_type && TYPE_MAX_VALUE (domain_type))
6212 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6214 /* Otherwise fail. */
6218 /* Return a tree representing the offset, in bytes, of the field referenced
6219 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6222 component_ref_field_offset (tree exp)
6224 tree aligned_offset = TREE_OPERAND (exp, 2);
6225 tree field = TREE_OPERAND (exp, 1);
6226 location_t loc = EXPR_LOCATION (exp);
6228 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6229 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
6230 value.  */
6233 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6234 sizetype from another type of the same width and signedness. */
6235 if (TREE_TYPE (aligned_offset) != sizetype)
6236 aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6237 return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6238 size_int (DECL_OFFSET_ALIGN (field)
6242 /* Otherwise, take the offset from that of the field. Substitute
6243 any PLACEHOLDER_EXPR that we have. */
6245 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6248 /* Alignment in bits the TARGET of an assignment may be assumed to have. */
6250 static unsigned HOST_WIDE_INT
6251 target_align (const_tree target)
/* We might have a chain of nested references with intermediate misaligning
   bitfield components, so we need to recurse to find out. */
6256 unsigned HOST_WIDE_INT this_align, outer_align;
6258 switch (TREE_CODE (target))
6264 this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6265 outer_align = target_align (TREE_OPERAND (target, 0));
6266 return MIN (this_align, outer_align);
6269 case ARRAY_RANGE_REF:
6270 this_align = TYPE_ALIGN (TREE_TYPE (target));
6271 outer_align = target_align (TREE_OPERAND (target, 0));
6272 return MIN (this_align, outer_align);
6275 case NON_LVALUE_EXPR:
6276 case VIEW_CONVERT_EXPR:
6277 this_align = TYPE_ALIGN (TREE_TYPE (target));
6278 outer_align = target_align (TREE_OPERAND (target, 0));
6279 return MAX (this_align, outer_align);
6282 return TYPE_ALIGN (TREE_TYPE (target));
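/* A toy model (plain C, not GCC trees) of the recursion above: component
   and array references can only lower the alignment we may assume (MIN),
   while view-converts merely reinterpret the same object and so keep the
   larger guarantee (MAX). The structure here is made up. */
struct toy_ref { int is_view_convert; unsigned align; struct toy_ref *inner; };

static unsigned
toy_target_align (const struct toy_ref *r)
{
  if (r->inner)
    {
      unsigned outer = toy_target_align (r->inner);
      if (r->is_view_convert)
	return r->align > outer ? r->align : outer;	/* MAX */
      return r->align < outer ? r->align : outer;	/* MIN */
    }
  return r->align;
}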
6287 /* Given an rtx VALUE that may contain additions and multiplications, return
6288 an equivalent value that just refers to a register, memory, or constant.
6289 This is done by generating instructions to perform the arithmetic and
6290 returning a pseudo-register containing the value.
6292 The returned value may be a REG, SUBREG, MEM or constant. */
rtx
force_operand (rtx value, rtx target)
{
6298 /* Use subtarget as the target for operand 0 of a binary operation. */
6299 rtx subtarget = get_subtarget (target);
6300 enum rtx_code code = GET_CODE (value);
6302 /* Check for subreg applied to an expression produced by loop optimizer. */
6304 && !REG_P (SUBREG_REG (value))
6305 && !MEM_P (SUBREG_REG (value)))
6308 = simplify_gen_subreg (GET_MODE (value),
6309 force_reg (GET_MODE (SUBREG_REG (value)),
6310 force_operand (SUBREG_REG (value),
6312 GET_MODE (SUBREG_REG (value)),
6313 SUBREG_BYTE (value));
6314 code = GET_CODE (value);
6317 /* Check for a PIC address load. */
6318 if ((code == PLUS || code == MINUS)
6319 && XEXP (value, 0) == pic_offset_table_rtx
6320 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6321 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6322 || GET_CODE (XEXP (value, 1)) == CONST))
6325 subtarget = gen_reg_rtx (GET_MODE (value));
6326 emit_move_insn (subtarget, value);
6330 if (ARITHMETIC_P (value))
6332 op2 = XEXP (value, 1);
6333 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6335 if (code == MINUS && CONST_INT_P (op2))
6338 op2 = negate_rtx (GET_MODE (value), op2);
6341 /* Check for an addition with OP2 a constant integer and our first
6342 operand a PLUS of a virtual register and something else. In that
6343 case, we want to emit the sum of the virtual register and the
6344 constant first and then add the other value. This allows virtual
6345 register instantiation to simply modify the constant rather than
6346 creating another one around this addition. */
6347 if (code == PLUS && CONST_INT_P (op2)
6348 && GET_CODE (XEXP (value, 0)) == PLUS
6349 && REG_P (XEXP (XEXP (value, 0), 0))
6350 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6351 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6353 rtx temp = expand_simple_binop (GET_MODE (value), code,
6354 XEXP (XEXP (value, 0), 0), op2,
6355 subtarget, 0, OPTAB_LIB_WIDEN);
6356 return expand_simple_binop (GET_MODE (value), code, temp,
6357 force_operand (XEXP (XEXP (value,
6359 target, 0, OPTAB_LIB_WIDEN);
6362 op1 = force_operand (XEXP (value, 0), subtarget);
6363 op2 = force_operand (op2, NULL_RTX);
6367 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6369 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6370 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6371 target, 1, OPTAB_LIB_WIDEN);
6373 return expand_divmod (0,
6374 FLOAT_MODE_P (GET_MODE (value))
6375 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6376 GET_MODE (value), op1, op2, target, 0);
6378 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6381 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6384 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6387 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6388 target, 0, OPTAB_LIB_WIDEN);
6390 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6391 target, 1, OPTAB_LIB_WIDEN);
6394 if (UNARY_P (value))
6397 target = gen_reg_rtx (GET_MODE (value));
6398 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6405 case FLOAT_TRUNCATE:
6406 convert_move (target, op1, code == ZERO_EXTEND);
6411 expand_fix (target, op1, code == UNSIGNED_FIX);
6415 case UNSIGNED_FLOAT:
6416 expand_float (target, op1, code == UNSIGNED_FLOAT);
6420 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6424 #ifdef INSN_SCHEDULING
/* On machines that have insn scheduling, we want all memory references to be
6426 explicit, so we need to deal with such paradoxical SUBREGs. */
6427 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6428 && (GET_MODE_SIZE (GET_MODE (value))
6429 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6431 = simplify_gen_subreg (GET_MODE (value),
6432 force_reg (GET_MODE (SUBREG_REG (value)),
6433 force_operand (SUBREG_REG (value),
6435 GET_MODE (SUBREG_REG (value)),
6436 SUBREG_BYTE (value));
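/* A toy illustration (no RTL involved) of the contract force_operand
   implements: arbitrary arithmetic trees are reduced, by "emitting" one
   operation per interior node, to a single register-like operand. The
   structure and register counter are made up for the sketch. */
struct toy_expr { char op; int reg; struct toy_expr *l, *r; }; /* op 0: leaf */
static int toy_next_reg;

static int
toy_force_operand (const struct toy_expr *e)
{
  int a, b, dest;
  if (e->op == 0)
    return e->reg;		/* already a bare operand */
  a = toy_force_operand (e->l);	/* flatten the subexpressions first */
  b = toy_force_operand (e->r);
  dest = ++toy_next_reg;	/* fresh pseudo register */
  /* A real implementation would emit (set dest (op a b)) here;
     this stands in for expand_simple_binop.  */
  (void) a; (void) b;
  return dest;
}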
6442 /* Subroutine of expand_expr: return nonzero iff there is no way that
6443 EXP can reference X, which is being modified. TOP_P is nonzero if this
6444 call is going to be used to determine whether we need a temporary
6445 for EXP, as opposed to a recursive call to this function.
6447 It is always safe for this routine to return zero since it merely
6448 searches for optimization opportunities. */
static int
safe_from_p (const_rtx x, tree exp, int top_p)
{
6457 /* If EXP has varying size, we MUST use a target since we currently
6458 have no way of allocating temporaries of variable size
6459 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6460 So we assume here that something at a higher level has prevented a
6461 clash. This is somewhat bogus, but the best we can do. Only
6462 do this when X is BLKmode and when we are at the top level. */
6463 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6464 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6465 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6466 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6467 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6469 && GET_MODE (x) == BLKmode)
6470 /* If X is in the outgoing argument area, it is always safe. */
6472 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6473 || (GET_CODE (XEXP (x, 0)) == PLUS
6474 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6477 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6478 find the underlying pseudo. */
6479 if (GET_CODE (x) == SUBREG)
6482 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6486 /* Now look at our tree code and possibly recurse. */
6487 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6489 case tcc_declaration:
6490 exp_rtl = DECL_RTL_IF_SET (exp);
6496 case tcc_exceptional:
6497 if (TREE_CODE (exp) == TREE_LIST)
6501 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6503 exp = TREE_CHAIN (exp);
6506 if (TREE_CODE (exp) != TREE_LIST)
6507 return safe_from_p (x, exp, 0);
6510 else if (TREE_CODE (exp) == CONSTRUCTOR)
6512 constructor_elt *ce;
6513 unsigned HOST_WIDE_INT idx;
6516 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6518 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6519 || !safe_from_p (x, ce->value, 0))
6523 else if (TREE_CODE (exp) == ERROR_MARK)
6524 return 1; /* An already-visited SAVE_EXPR? */
/* The only case we look at here is the DECL_INITIAL inside a
   DECL_EXPR. */
6531 return (TREE_CODE (exp) != DECL_EXPR
6532 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6533 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6534 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6537 case tcc_comparison:
6538 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6543 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6545 case tcc_expression:
6548 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6549 the expression. If it is set, we conflict iff we are that rtx or
6550 both are in memory. Otherwise, we check all operands of the
6551 expression recursively. */
6553 switch (TREE_CODE (exp))
6556 /* If the operand is static or we are static, we can't conflict.
6557 Likewise if we don't conflict with the operand at all. */
6558 if (staticp (TREE_OPERAND (exp, 0))
6559 || TREE_STATIC (exp)
6560 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
/* Otherwise, the only way this can conflict is if we are taking
   the address of a DECL whose address is part of X, which is
   very rare. */
6566 exp = TREE_OPERAND (exp, 0);
6569 if (!DECL_RTL_SET_P (exp)
6570 || !MEM_P (DECL_RTL (exp)))
6573 exp_rtl = XEXP (DECL_RTL (exp), 0);
6577 case MISALIGNED_INDIRECT_REF:
6578 case ALIGN_INDIRECT_REF:
6581 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6582 get_alias_set (exp)))
6587 /* Assume that the call will clobber all hard registers and
6589 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6594 case WITH_CLEANUP_EXPR:
6595 case CLEANUP_POINT_EXPR:
6596 /* Lowered by gimplify.c. */
6600 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6606 /* If we have an rtx, we do not need to scan our operands. */
6610 nops = TREE_OPERAND_LENGTH (exp);
6611 for (i = 0; i < nops; i++)
6612 if (TREE_OPERAND (exp, i) != 0
6613 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6619 /* Should never get a type here. */
6623 /* If we have an rtl, find any enclosed object. Then see if we conflict
6627 if (GET_CODE (exp_rtl) == SUBREG)
6629 exp_rtl = SUBREG_REG (exp_rtl);
6631 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
/* If the rtl is X, then it is not safe. Otherwise, it is safe unless
   both are memory and they conflict. */
6637 return ! (rtx_equal_p (x, exp_rtl)
6638 || (MEM_P (x) && MEM_P (exp_rtl)
6639 && true_dependence (exp_rtl, VOIDmode, x,
6640 rtx_addr_varies_p)));
6643 /* If we reach here, it is safe. */
6648 /* Return the highest power of two that EXP is known to be a multiple of.
6649 This is used in updating alignment of MEMs in array references. */
6651 unsigned HOST_WIDE_INT
6652 highest_pow2_factor (const_tree exp)
6654 unsigned HOST_WIDE_INT c0, c1;
6656 switch (TREE_CODE (exp))
6659 /* We can find the lowest bit that's a one. If the low
6660 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6661 We need to handle this case since we can find it in a COND_EXPR,
6662 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
erroneous program, so return BIGGEST_ALIGNMENT to avoid any
later ICE. */
6665 if (TREE_OVERFLOW (exp))
6666 return BIGGEST_ALIGNMENT;
6669 /* Note: tree_low_cst is intentionally not used here,
6670 we don't care about the upper bits. */
c0 = TREE_INT_CST_LOW (exp);
c0 &= -c0;
return c0 ? c0 : BIGGEST_ALIGNMENT;
6677 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6678 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6679 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6680 return MIN (c0, c1);
6683 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6684 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6687 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6689 if (integer_pow2p (TREE_OPERAND (exp, 1))
6690 && host_integerp (TREE_OPERAND (exp, 1), 1))
6692 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6693 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6694 return MAX (1, c0 / c1);
6699 /* The highest power of two of a bit-and expression is the maximum of
6700 that of its operands. We typically get here for a complex LHS and
6701 a constant negative power of two on the RHS to force an explicit
6702 alignment, so don't bother looking at the LHS. */
6703 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6707 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6710 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6713 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6714 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6715 return MIN (c0, c1);
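/* The INTEGER_CST case above relies on the classic lowest-set-bit
   identity: c & -c isolates the largest power of two dividing c.
   Standalone check in plain C: */
static unsigned long
lowest_set_bit (unsigned long c)
{
  return c & -c;	/* e.g. 24 -> 8, 40 -> 8, 7 -> 1, 0 -> 0 */
}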
6724 /* Similar, except that the alignment requirements of TARGET are
6725 taken into account. Assume it is at least as aligned as its
6726 type, unless it is a COMPONENT_REF in which case the layout of
6727 the structure gives the alignment. */
6729 static unsigned HOST_WIDE_INT
6730 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6732 unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6733 unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6735 return MAX (factor, talign);
6738 /* Return &VAR expression for emulated thread local VAR. */
static tree
emutls_var_address (tree var)
{
6743 tree emuvar = emutls_decl (var);
6744 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6745 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6746 tree arglist = build_tree_list (NULL_TREE, arg);
6747 tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
6748 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6752 /* Subroutine of expand_expr. Expand the two operands of a binary
6753 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6754 The value may be stored in TARGET if TARGET is nonzero. The
6755 MODIFIER argument is as documented by expand_expr. */
static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
6761 if (! safe_from_p (target, exp1, 1))
6763 if (operand_equal_p (exp0, exp1, 0))
6765 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6766 *op1 = copy_rtx (*op0);
6770 /* If we need to preserve evaluation order, copy exp0 into its own
6771 temporary variable so that it can't be clobbered by exp1. */
6772 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6773 exp0 = save_expr (exp0);
6774 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6775 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6780 /* Return a MEM that contains constant EXP. DEFER is as for
6781 output_constant_def and MODIFIER is as for expand_expr. */
static rtx
expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
{
6788 mem = output_constant_def (exp, defer);
6789 if (modifier != EXPAND_INITIALIZER)
6790 mem = use_anchored_address (mem);
6794 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6795 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
static rtx
expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
			 enum expand_modifier modifier)
{
6801 rtx result, subtarget;
6803 HOST_WIDE_INT bitsize, bitpos;
6804 int volatilep, unsignedp;
6805 enum machine_mode mode1;
6807 /* If we are taking the address of a constant and are at the top level,
we have to use output_constant_def since we can't call force_const_mem
yet. */
6810 /* ??? This should be considered a front-end bug. We should not be
6811 generating ADDR_EXPR of something that isn't an LVALUE. The only
6812 exception here is STRING_CST. */
6813 if (CONSTANT_CLASS_P (exp))
6814 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6816 /* Everything must be something allowed by is_gimple_addressable. */
6817 switch (TREE_CODE (exp))
6820 /* This case will happen via recursion for &a->b. */
6821 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6824 /* Recurse and make the output_constant_def clause above handle this. */
6825 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6829 /* The real part of the complex number is always first, therefore
6830 the address is the same as the address of the parent object. */
6833 inner = TREE_OPERAND (exp, 0);
6837 /* The imaginary part of the complex number is always second.
The expression is therefore always offset by the size of the
scalar type. */
6841 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6842 inner = TREE_OPERAND (exp, 0);
6846 /* TLS emulation hook - replace __thread VAR's &VAR with
6847 __emutls_get_address (&_emutls.VAR). */
6848 if (! targetm.have_tls
6849 && TREE_CODE (exp) == VAR_DECL
6850 && DECL_THREAD_LOCAL_P (exp))
6852 exp = emutls_var_address (exp);
6853 return expand_expr (exp, target, tmode, modifier);
6858 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6859 expand_expr, as that can have various side effects; LABEL_DECLs for
6860 example, may not have their DECL_RTL set yet. Expand the rtl of
6861 CONSTRUCTORs too, which should yield a memory reference for the
6862 constructor's contents. Assume language specific tree nodes can
6863 be expanded in some interesting way. */
6864 gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
6866 || TREE_CODE (exp) == CONSTRUCTOR
6867 || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
6869 result = expand_expr (exp, target, tmode,
6870 modifier == EXPAND_INITIALIZER
6871 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6873 /* If the DECL isn't in memory, then the DECL wasn't properly
6874 marked TREE_ADDRESSABLE, which will be either a front-end
6875 or a tree optimizer bug. */
6876 gcc_assert (MEM_P (result));
6877 result = XEXP (result, 0);
6879 /* ??? Is this needed anymore? */
if (DECL_P (exp) && !TREE_USED (exp))
6882 assemble_external (exp);
6883 TREE_USED (exp) = 1;
6886 if (modifier != EXPAND_INITIALIZER
6887 && modifier != EXPAND_CONST_ADDRESS)
6888 result = force_operand (result, target);
6892 /* Pass FALSE as the last argument to get_inner_reference although
6893 we are expanding to RTL. The rationale is that we know how to
6894 handle "aligning nodes" here: we can just bypass them because
6895 they won't change the final object whose address will be returned
6896 (they actually exist only for that purpose). */
6897 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6898 &mode1, &unsignedp, &volatilep, false);
6902 /* We must have made progress. */
6903 gcc_assert (inner != exp);
6905 subtarget = offset || bitpos ? NULL_RTX : target;
6906 /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
6907 inner alignment, force the inner to be sufficiently aligned. */
6908 if (CONSTANT_CLASS_P (inner)
6909 && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
6911 inner = copy_node (inner);
6912 TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
6913 TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
6914 TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
6916 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6922 if (modifier != EXPAND_NORMAL)
6923 result = force_operand (result, NULL);
6924 tmp = expand_expr (offset, NULL_RTX, tmode,
6925 modifier == EXPAND_INITIALIZER
6926 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6928 result = convert_memory_address (tmode, result);
6929 tmp = convert_memory_address (tmode, tmp);
6931 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6932 result = gen_rtx_PLUS (tmode, result, tmp);
6935 subtarget = bitpos ? NULL_RTX : target;
6936 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6937 1, OPTAB_LIB_WIDEN);
6943 /* Someone beforehand should have rejected taking the address
6944 of such an object. */
6945 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6947 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6948 if (modifier < EXPAND_SUM)
6949 result = force_operand (result, target);
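/* Sketch of the final address arithmetic just above: a bit position
   within the object contributes to the address only if it falls on a
   byte boundary, and then adds bitpos / BITS_PER_UNIT bytes. */
#include <assert.h>

static unsigned long
byte_offset_of (unsigned long bitpos)
{
  assert (bitpos % 8 == 0);	/* mirrors the gcc_assert above */
  return bitpos / 8;
}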
6955 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6956 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
static rtx
expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
		       enum expand_modifier modifier)
{
6962 enum machine_mode rmode;
6965 /* Target mode of VOIDmode says "whatever's natural". */
6966 if (tmode == VOIDmode)
6967 tmode = TYPE_MODE (TREE_TYPE (exp));
6969 /* We can get called with some Weird Things if the user does silliness
6970 like "(short) &a". In that case, convert_memory_address won't do
6971 the right thing, so ignore the given target mode. */
6972 if (tmode != Pmode && tmode != ptr_mode)
6975 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
/* Despite expand_expr's claims about ignoring TMODE when it is not
   strictly convenient, things break if we don't honor it. Note
   that combined with the above, we only do this for pointer modes. */
6981 rmode = GET_MODE (result);
6982 if (rmode == VOIDmode)
6985 result = convert_memory_address (tmode, result);
6990 /* Generate code for computing CONSTRUCTOR EXP.
6991 An rtx for the computed value is returned. If AVOID_TEMP_MEM
is TRUE, NULL is returned instead of creating a temporary variable
in memory, and the caller needs to handle it differently. */
static rtx
expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
		    bool avoid_temp_mem)
{
6999 tree type = TREE_TYPE (exp);
7000 enum machine_mode mode = TYPE_MODE (type);
7002 /* Try to avoid creating a temporary at all. This is possible
7003 if all of the initializer is zero.
FIXME: try to handle all [0..255] initializers we can handle
with memset. */
7006 if (TREE_STATIC (exp)
7007 && !TREE_ADDRESSABLE (exp)
7008 && target != 0 && mode == BLKmode
7009 && all_zeros_p (exp))
7011 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7015 /* All elts simple constants => refer to a constant in memory. But
7016 if this is a non-BLKmode mode, let it store a field at a time
7017 since that should make a CONST_INT or CONST_DOUBLE when we
7018 fold. Likewise, if we have a target we can use, it is best to
7019 store directly into the target unless the type is large enough
7020 that memcpy will be used. If we are making an initializer and
7021 all operands are constant, put it in memory as well.
7023 FIXME: Avoid trying to fill vector constructors piece-meal.
7024 Output them with output_constant_def below unless we're sure
7025 they're zeros. This should go away when vector initializers
7026 are treated like VECTOR_CST instead of arrays. */
7027 if ((TREE_STATIC (exp)
7028 && ((mode == BLKmode
7029 && ! (target != 0 && safe_from_p (target, exp, 1)))
7030 || TREE_ADDRESSABLE (exp)
7031 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7032 && (! MOVE_BY_PIECES_P
7033 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7035 && ! mostly_zeros_p (exp))))
7036 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7037 && TREE_CONSTANT (exp)))
7044 constructor = expand_expr_constant (exp, 1, modifier);
7046 if (modifier != EXPAND_CONST_ADDRESS
7047 && modifier != EXPAND_INITIALIZER
7048 && modifier != EXPAND_SUM)
7049 constructor = validize_mem (constructor);
7054 /* Handle calls that pass values in multiple non-contiguous
7055 locations. The Irix 6 ABI has examples of this. */
7056 if (target == 0 || ! safe_from_p (target, exp, 1)
7057 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7063 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7064 | (TREE_READONLY (exp)
7065 * TYPE_QUAL_CONST))),
7066 0, TREE_ADDRESSABLE (exp), 1);
7069 store_constructor (exp, target, 0, int_expr_size (exp));
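/* A standalone analogue (plain C, hypothetical object) of the all-zeros
   fast path above: when every element of the initializer is zero, a
   single block clear beats storing the constructor field by field. */
#include <string.h>

static void
init_all_zero (void *object, size_t size)
{
  memset (object, 0, size);	/* plays the role of clear_storage */
}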
7074 /* expand_expr: generate code for computing expression EXP.
7075 An rtx for the computed value is returned. The value is never null.
7076 In the case of a void EXP, const0_rtx is returned.
7078 The value may be stored in TARGET if TARGET is nonzero.
7079 TARGET is just a suggestion; callers must assume that
7080 the rtx returned may not be the same as TARGET.
7082 If TARGET is CONST0_RTX, it means that the value will be ignored.
7084 If TMODE is not VOIDmode, it suggests generating the
7085 result in mode TMODE. But this is done only when convenient.
Otherwise, TMODE is ignored and the value is generated in its natural mode.
7087 TMODE is just a suggestion; callers must assume that
7088 the rtx returned may not have mode TMODE.
7090 Note that TARGET may have neither TMODE nor MODE. In that case, it
7091 probably will not be used.
7093 If MODIFIER is EXPAND_SUM then when EXP is an addition
7094 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7095 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7096 products as above, or REG or MEM, or constant.
7097 Ordinarily in such cases we would output mul or add instructions
7098 and then return a pseudo reg containing the sum.
7100 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7101 it also marks a label as absolutely required (it can't be dead).
7102 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7103 This is used for outputting expressions used in initializers.
7105 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7106 with a constant address even if that address is not normally legitimate.
7107 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7109 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7110 a call parameter. Such targets require special care as we haven't yet
7111 marked TARGET so that it's safe from being trashed by libcalls. We
7112 don't want to use TARGET for anything but the final result;
intermediate values must go elsewhere. Additionally, calls to
7114 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7116 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7117 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7118 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7119 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
7127 rtx ret, last = NULL;
7129 /* Handle ERROR_MARK before anybody tries to access its type. */
7130 if (TREE_CODE (exp) == ERROR_MARK
7131 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7133 ret = CONST0_RTX (tmode);
7134 return ret ? ret : const0_rtx;
7137 if (flag_non_call_exceptions)
7139 lp_nr = lookup_expr_eh_lp (exp);
7141 last = get_last_insn ();
7144 /* If this is an expression of some kind and it has an associated line
7145 number, then emit the line number before expanding the expression.
7147 We need to save and restore the file and line information so that
7148 errors discovered during expansion are emitted with the right
information. It would be better if the diagnostic routines
used the file/line information embedded in the tree nodes rather
than globals. */
7152 if (cfun && EXPR_HAS_LOCATION (exp))
7154 location_t saved_location = input_location;
7155 input_location = EXPR_LOCATION (exp);
7156 set_curr_insn_source_location (input_location);
7158 /* Record where the insns produced belong. */
7159 set_curr_insn_block (TREE_BLOCK (exp));
7161 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7163 input_location = saved_location;
7167 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7170 /* If using non-call exceptions, mark all insns that may trap.
7171 expand_call() will mark CALL_INSNs before we get to this code,
7172 but it doesn't handle libcalls, and these may trap. */
7176 for (insn = next_real_insn (last); insn;
7177 insn = next_real_insn (insn))
7179 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7180 /* If we want exceptions for non-call insns, any
7181 may_trap_p instruction may throw. */
7182 && GET_CODE (PATTERN (insn)) != CLOBBER
7183 && GET_CODE (PATTERN (insn)) != USE
7184 && insn_could_throw_p (insn))
7185 make_reg_eh_region_note (insn, 0, lp_nr);
rtx
expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier)
{
7196 rtx op0, op1, op2, temp;
7199 enum machine_mode mode;
7200 enum tree_code code = ops->code;
7202 rtx subtarget, original_target;
7204 tree subexp0, subexp1;
7205 bool reduce_bit_field;
7206 gimple subexp0_def, subexp1_def;
7208 location_t loc = ops->location;
7209 tree treeop0, treeop1, treeop2;
7210 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7211 ? reduce_to_bit_field_precision ((expr), \
7217 mode = TYPE_MODE (type);
7218 unsignedp = TYPE_UNSIGNED (type);
7224 /* We should be called only on simple (binary or unary) expressions,
7225 exactly those that are valid in gimple expressions that aren't
7226 GIMPLE_SINGLE_RHS (or invalid). */
7227 gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7228 || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);
7230 ignore = (target == const0_rtx
7231 || ((CONVERT_EXPR_CODE_P (code)
7232 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7233 && TREE_CODE (type) == VOID_TYPE));
7235 /* We should be called only if we need the result. */
7236 gcc_assert (!ignore);
7238 /* An operation in what may be a bit-field type needs the
7239 result to be reduced to the precision of the bit-field type,
7240 which is narrower than that of the type's mode. */
7241 reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7242 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7244 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7247 /* Use subtarget as the target for operand 0 of a binary operation. */
7248 subtarget = get_subtarget (target);
7249 original_target = target;
7253 case NON_LVALUE_EXPR:
7256 if (treeop0 == error_mark_node)
7259 if (TREE_CODE (type) == UNION_TYPE)
7261 tree valtype = TREE_TYPE (treeop0);
7263 /* If both input and output are BLKmode, this conversion isn't doing
anything except possibly changing memory attributes. */
7265 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7267 rtx result = expand_expr (treeop0, target, tmode,
7270 result = copy_rtx (result);
7271 set_mem_attributes (result, type, 0);
7277 if (TYPE_MODE (type) != BLKmode)
7278 target = gen_reg_rtx (TYPE_MODE (type));
7280 target = assign_temp (type, 0, 1, 1);
7284 /* Store data into beginning of memory target. */
7285 store_expr (treeop0,
7286 adjust_address (target, TYPE_MODE (valtype), 0),
7287 modifier == EXPAND_STACK_PARM,
7292 gcc_assert (REG_P (target));
7294 /* Store this field into a union of the proper type. */
7295 store_field (target,
7296 MIN ((int_size_in_bytes (TREE_TYPE
7299 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7300 0, TYPE_MODE (valtype), treeop0,
7304 /* Return the entire union. */
7308 if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7310 op0 = expand_expr (treeop0, target, VOIDmode,
7313 /* If the signedness of the conversion differs and OP0 is
7314 a promoted SUBREG, clear that indication since we now
7315 have to do the proper extension. */
7316 if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7317 && GET_CODE (op0) == SUBREG)
7318 SUBREG_PROMOTED_VAR_P (op0) = 0;
7320 return REDUCE_BIT_FIELD (op0);
7323 op0 = expand_expr (treeop0, NULL_RTX, mode,
7324 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7325 if (GET_MODE (op0) == mode)
7328 /* If OP0 is a constant, just convert it into the proper mode. */
7329 else if (CONSTANT_P (op0))
7331 tree inner_type = TREE_TYPE (treeop0);
7332 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7334 if (modifier == EXPAND_INITIALIZER)
7335 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7336 subreg_lowpart_offset (mode,
op0 = convert_modes (mode, inner_mode, op0,
7340 TYPE_UNSIGNED (inner_type));
7343 else if (modifier == EXPAND_INITIALIZER)
7344 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7346 else if (target == 0)
7347 op0 = convert_to_mode (mode, op0,
7348 TYPE_UNSIGNED (TREE_TYPE
7352 convert_move (target, op0,
7353 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7357 return REDUCE_BIT_FIELD (op0);
7359 case ADDR_SPACE_CONVERT_EXPR:
7361 tree treeop0_type = TREE_TYPE (treeop0);
7363 addr_space_t as_from;
7365 gcc_assert (POINTER_TYPE_P (type));
7366 gcc_assert (POINTER_TYPE_P (treeop0_type));
7368 as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7369 as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7371 /* Conversions between pointers to the same address space should
7372 have been implemented via CONVERT_EXPR / NOP_EXPR. */
7373 gcc_assert (as_to != as_from);
7375 /* Ask target code to handle conversion between pointers
7376 to overlapping address spaces. */
7377 if (targetm.addr_space.subset_p (as_to, as_from)
7378 || targetm.addr_space.subset_p (as_from, as_to))
7380 op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7381 op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7386 /* For disjoint address spaces, converting anything but
7387 a null pointer invokes undefined behaviour. We simply
7388 always return a null pointer here. */
7389 return CONST0_RTX (mode);
7392 case POINTER_PLUS_EXPR:
/* Even though the sizetype mode and the pointer's mode can be different,
   expand is able to handle this correctly and get the correct result out
   of the PLUS_EXPR code. */
7396 /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7397 if sizetype precision is smaller than pointer precision. */
7398 if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7399 treeop1 = fold_convert_loc (loc, type,
7400 fold_convert_loc (loc, ssizetype,
7404 /* Check if this is a case for multiplication and addition. */
7405 if ((TREE_CODE (type) == INTEGER_TYPE
7406 || TREE_CODE (type) == FIXED_POINT_TYPE)
7407 && (subexp0_def = get_def_for_expr (treeop0,
7410 tree subsubexp0, subsubexp1;
7411 gimple subsubexp0_def, subsubexp1_def;
7412 enum tree_code this_code;
7414 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7415 : FIXED_CONVERT_EXPR;
7416 subsubexp0 = gimple_assign_rhs1 (subexp0_def);
7417 subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7418 subsubexp1 = gimple_assign_rhs2 (subexp0_def);
7419 subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7420 if (subsubexp0_def && subsubexp1_def
7421 && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7422 && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7423 && (TYPE_PRECISION (TREE_TYPE (top0))
7424 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7425 && (TYPE_PRECISION (TREE_TYPE (top0))
7426 == TYPE_PRECISION (TREE_TYPE (top1)))
7427 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7428 == TYPE_UNSIGNED (TREE_TYPE (top1))))
7430 tree op0type = TREE_TYPE (top0);
7431 enum machine_mode innermode = TYPE_MODE (op0type);
7432 bool zextend_p = TYPE_UNSIGNED (op0type);
7433 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7435 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
7437 this_optab = zextend_p ? usmadd_widen_optab
7438 : ssmadd_widen_optab;
7439 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7440 && (optab_handler (this_optab, mode)->insn_code
7441 != CODE_FOR_nothing))
7443 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7445 op2 = expand_expr (treeop1, subtarget,
7446 VOIDmode, EXPAND_NORMAL);
7447 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7450 return REDUCE_BIT_FIELD (temp);
7455 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7456 something else, make sure we add the register to the constant and
7457 then to the other thing. This case can occur during strength
7458 reduction and doing it this way will produce better code if the
7459 frame pointer or argument pointer is eliminated.
7461 fold-const.c will ensure that the constant is always in the inner
7462 PLUS_EXPR, so the only case we need to do anything about is if
7463 sp, ap, or fp is our second argument, in which case we must swap
7464 the innermost first argument and our second argument. */
7466 if (TREE_CODE (treeop0) == PLUS_EXPR
7467 && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7468 && TREE_CODE (treeop1) == VAR_DECL
7469 && (DECL_RTL (treeop1) == frame_pointer_rtx
7470 || DECL_RTL (treeop1) == stack_pointer_rtx
7471 || DECL_RTL (treeop1) == arg_pointer_rtx))
7475 treeop1 = TREE_OPERAND (treeop0, 0);
7476 TREE_OPERAND (treeop0, 0) = t;
7479 /* If the result is to be ptr_mode and we are adding an integer to
7480 something, we might be forming a constant. So try to use
7481 plus_constant. If it produces a sum and we can't accept it,
7482 use force_operand. This allows P = &ARR[const] to generate
efficient code on machines where a SYMBOL_REF is not a valid
address.
7486 If this is an EXPAND_SUM call, always return the sum. */
7487 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7488 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7490 if (modifier == EXPAND_STACK_PARM)
7492 if (TREE_CODE (treeop0) == INTEGER_CST
7493 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7494 && TREE_CONSTANT (treeop1))
7498 op1 = expand_expr (treeop1, subtarget, VOIDmode,
7500 /* Use immed_double_const to ensure that the constant is
7501 truncated according to the mode of OP1, then sign extended
7502 to a HOST_WIDE_INT. Using the constant directly can result
7503 in non-canonical RTL in a 64x32 cross compile. */
7505 = immed_double_const (TREE_INT_CST_LOW (treeop0),
7507 TYPE_MODE (TREE_TYPE (treeop1)));
7508 op1 = plus_constant (op1, INTVAL (constant_part));
7509 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7510 op1 = force_operand (op1, target);
7511 return REDUCE_BIT_FIELD (op1);
7514 else if (TREE_CODE (treeop1) == INTEGER_CST
7515 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7516 && TREE_CONSTANT (treeop0))
7520 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7521 (modifier == EXPAND_INITIALIZER
7522 ? EXPAND_INITIALIZER : EXPAND_SUM));
7523 if (! CONSTANT_P (op0))
7525 op1 = expand_expr (treeop1, NULL_RTX,
7526 VOIDmode, modifier);
7527 /* Return a PLUS if modifier says it's OK. */
7528 if (modifier == EXPAND_SUM
7529 || modifier == EXPAND_INITIALIZER)
7530 return simplify_gen_binary (PLUS, mode, op0, op1);
7533 /* Use immed_double_const to ensure that the constant is
7534 truncated according to the mode of OP1, then sign extended
7535 to a HOST_WIDE_INT. Using the constant directly can result
7536 in non-canonical RTL in a 64x32 cross compile. */
7538 = immed_double_const (TREE_INT_CST_LOW (treeop1),
7540 TYPE_MODE (TREE_TYPE (treeop0)));
7541 op0 = plus_constant (op0, INTVAL (constant_part));
7542 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7543 op0 = force_operand (op0, target);
7544 return REDUCE_BIT_FIELD (op0);
7548 /* No sense saving up arithmetic to be done
7549 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
zero-extend. */
7552 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7553 || mode != ptr_mode)
7555 expand_operands (treeop0, treeop1,
7556 subtarget, &op0, &op1, EXPAND_NORMAL);
7557 if (op0 == const0_rtx)
7559 if (op1 == const0_rtx)
7564 expand_operands (treeop0, treeop1,
7565 subtarget, &op0, &op1, modifier);
7566 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
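/* The shape the multiply-and-add detection above looks for, written as
   plain C: two narrow operands are widened, multiplied in the wide mode,
   and added to a wide accumulator. On targets providing
   smadd_widen_optab / umadd_widen_optab this can become one widening
   multiply-accumulate instruction. */
#include <stdint.h>

static int64_t
widening_madd (int32_t a, int32_t b, int64_t acc)
{
  return acc + (int64_t) a * (int64_t) b;
}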
7569 /* Check if this is a case for multiplication and subtraction. */
7570 if ((TREE_CODE (type) == INTEGER_TYPE
7571 || TREE_CODE (type) == FIXED_POINT_TYPE)
7572 && (subexp1_def = get_def_for_expr (treeop1,
7575 tree subsubexp0, subsubexp1;
7576 gimple subsubexp0_def, subsubexp1_def;
7577 enum tree_code this_code;
7579 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7580 : FIXED_CONVERT_EXPR;
7581 subsubexp0 = gimple_assign_rhs1 (subexp1_def);
7582 subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7583 subsubexp1 = gimple_assign_rhs2 (subexp1_def);
7584 subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7585 if (subsubexp0_def && subsubexp1_def
7586 && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7587 && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7588 && (TYPE_PRECISION (TREE_TYPE (top0))
7589 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7590 && (TYPE_PRECISION (TREE_TYPE (top0))
7591 == TYPE_PRECISION (TREE_TYPE (top1)))
7592 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7593 == TYPE_UNSIGNED (TREE_TYPE (top1))))
7595 tree op0type = TREE_TYPE (top0);
7596 enum machine_mode innermode = TYPE_MODE (op0type);
7597 bool zextend_p = TYPE_UNSIGNED (op0type);
7598 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7600 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
7602 this_optab = zextend_p ? usmsub_widen_optab
7603 : ssmsub_widen_optab;
7604 if (mode == GET_MODE_2XWIDER_MODE (innermode)
7605 && (optab_handler (this_optab, mode)->insn_code
7606 != CODE_FOR_nothing))
7608 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7610 op2 = expand_expr (treeop0, subtarget,
7611 VOIDmode, EXPAND_NORMAL);
7612 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7615 return REDUCE_BIT_FIELD (temp);
7620 /* For initializers, we are allowed to return a MINUS of two
symbolic constants. Here we handle all cases when both operands
are constant. */
7623 /* Handle difference of two symbolic constants,
7624 for the sake of an initializer. */
7625 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7626 && really_constant_p (treeop0)
7627 && really_constant_p (treeop1))
7629 expand_operands (treeop0, treeop1,
7630 NULL_RTX, &op0, &op1, modifier);
7632 /* If the last operand is a CONST_INT, use plus_constant of
7633 the negated constant. Else make the MINUS. */
7634 if (CONST_INT_P (op1))
7635 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7637 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7640 /* No sense saving up arithmetic to be done
7641 if it's all in the wrong mode to form part of an address.
And force_operand won't know whether to sign-extend or
zero-extend. */
7644 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7645 || mode != ptr_mode)
7648 expand_operands (treeop0, treeop1,
7649 subtarget, &op0, &op1, modifier);
7651 /* Convert A - const to A + (-const). */
7652 if (CONST_INT_P (op1))
7654 op1 = negate_rtx (mode, op1);
7655 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7661 /* If this is a fixed-point operation, then we cannot use the code
below because "expand_mult" doesn't support sat/no-sat fixed-point
multiplications. */
7664 if (ALL_FIXED_POINT_MODE_P (mode))
7667 /* If first operand is constant, swap them.
7668 Thus the following special case checks need only
7669 check the second operand. */
7670 if (TREE_CODE (treeop0) == INTEGER_CST)
7677 /* Attempt to return something suitable for generating an
7678 indexed address, for machines that support that. */
7680 if (modifier == EXPAND_SUM && mode == ptr_mode
7681 && host_integerp (treeop1, 0))
7683 tree exp1 = treeop1;
7685 op0 = expand_expr (treeop0, subtarget, VOIDmode,
7689 op0 = force_operand (op0, NULL_RTX);
7691 op0 = copy_to_mode_reg (mode, op0);
7693 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7694 gen_int_mode (tree_low_cst (exp1, 0),
7695 TYPE_MODE (TREE_TYPE (exp1)))));
7698 if (modifier == EXPAND_STACK_PARM)
7701 /* Check for multiplying things that have been extended
7702 from a narrower type. If this machine supports multiplying
7703 in that narrower type with a result in the desired type,
7704 do it that way, and avoid the explicit type-conversion. */
7708 subexp0_def = get_def_for_expr (subexp0, NOP_EXPR);
7709 subexp1_def = get_def_for_expr (subexp1, NOP_EXPR);
7710 top0 = top1 = NULL_TREE;
7712 /* First, check if we have a multiplication of one signed and one
7713 unsigned operand. */
7715 && (top0 = gimple_assign_rhs1 (subexp0_def))
7717 && (top1 = gimple_assign_rhs1 (subexp1_def))
7718 && TREE_CODE (type) == INTEGER_TYPE
7719 && (TYPE_PRECISION (TREE_TYPE (top0))
7720 < TYPE_PRECISION (TREE_TYPE (subexp0)))
7721 && (TYPE_PRECISION (TREE_TYPE (top0))
7722 == TYPE_PRECISION (TREE_TYPE (top1)))
7723 && (TYPE_UNSIGNED (TREE_TYPE (top0))
7724 != TYPE_UNSIGNED (TREE_TYPE (top1))))
7726 enum machine_mode innermode
7727 = TYPE_MODE (TREE_TYPE (top0));
7728 this_optab = usmul_widen_optab;
7729 if (mode == GET_MODE_WIDER_MODE (innermode))
7731 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7733 if (TYPE_UNSIGNED (TREE_TYPE (top0)))
7734 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7737 expand_operands (top0, top1, NULL_RTX, &op1, &op0,
7744 /* Check for a multiplication with matching signedness. If
valid, TOP0 and TOP1 were set in the previous if
condition. */
7748 && TREE_CODE (type) == INTEGER_TYPE
7749 && (TYPE_PRECISION (TREE_TYPE (top0))
7750 < TYPE_PRECISION (TREE_TYPE (subexp0)))
7751 && ((TREE_CODE (subexp1) == INTEGER_CST
7752 && int_fits_type_p (subexp1, TREE_TYPE (top0))
7753 /* Don't use a widening multiply if a shift will do. */
7754 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1)))
7755 > HOST_BITS_PER_WIDE_INT)
7756 || exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0))
7759 && (TYPE_PRECISION (TREE_TYPE (top1))
7760 == TYPE_PRECISION (TREE_TYPE (top0))
7761 /* If both operands are extended, they must either both
7762 be zero-extended or both be sign-extended. */
7763 && (TYPE_UNSIGNED (TREE_TYPE (top1))
7764 == TYPE_UNSIGNED (TREE_TYPE (top0)))))))
7766 tree op0type = TREE_TYPE (top0);
7767 enum machine_mode innermode = TYPE_MODE (op0type);
7768 bool zextend_p = TYPE_UNSIGNED (op0type);
7769 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7770 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7772 if (mode == GET_MODE_2XWIDER_MODE (innermode))
7774 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7776 if (TREE_CODE (subexp1) == INTEGER_CST)
7777 expand_operands (top0, subexp1, NULL_RTX, &op0, &op1,
7780 expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7784 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
7785 && innermode == word_mode)
7788 op0 = expand_normal (top0);
7789 if (TREE_CODE (subexp1) == INTEGER_CST)
7790 op1 = convert_modes (innermode, mode,
7791 expand_normal (subexp1), unsignedp);
7793 op1 = expand_normal (top1);
7794 temp = expand_binop (mode, other_optab, op0, op1, target,
7795 unsignedp, OPTAB_LIB_WIDEN);
7796 hipart = gen_highpart (innermode, temp);
7797 htem = expand_mult_highpart_adjust (innermode, hipart,
7801 emit_move_insn (hipart, htem);
7802 return REDUCE_BIT_FIELD (temp);
7806 expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL);
7807 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
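/* Likewise for the plain widening-multiply cases above (smul_widen_optab
   and friends): both operands come from a narrower type and the
   full-width product is wanted. In C: */
#include <stdint.h>

static uint64_t
widening_umul (uint32_t a, uint32_t b)
{
  return (uint64_t) a * b;	/* one widening multiply on many targets */
}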
7809 case TRUNC_DIV_EXPR:
7810 case FLOOR_DIV_EXPR:
7812 case ROUND_DIV_EXPR:
7813 case EXACT_DIV_EXPR:
7814 /* If this is a fixed-point operation, then we cannot use the code
below because "expand_divmod" doesn't support sat/no-sat fixed-point
divisions. */
7817 if (ALL_FIXED_POINT_MODE_P (mode))
7820 if (modifier == EXPAND_STACK_PARM)
/* Possible optimization: compute the dividend with EXPAND_SUM;
   then, if the divisor is constant, we can optimize the case
   where some terms of the dividend have coefficients divisible by it. */
7825 expand_operands (treeop0, treeop1,
7826 subtarget, &op0, &op1, EXPAND_NORMAL);
7827 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7832 case TRUNC_MOD_EXPR:
7833 case FLOOR_MOD_EXPR:
7835 case ROUND_MOD_EXPR:
7836 if (modifier == EXPAND_STACK_PARM)
7838 expand_operands (treeop0, treeop1,
7839 subtarget, &op0, &op1, EXPAND_NORMAL);
7840 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
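/* The _DIV_EXPR/_MOD_EXPR variants differ only in rounding. C's native
   '/' and '%' are the TRUNC forms; a FLOOR_MOD_EXPR can be sketched on
   top of them by nudging the remainder toward the divisor's sign: */
static int
floor_mod (int a, int b)
{
  int r = a % b;			/* TRUNC_MOD_EXPR */
  if (r != 0 && (r < 0) != (b < 0))
    r += b;				/* adjust to the FLOOR result */
  return r;
}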
7842 case FIXED_CONVERT_EXPR:
7843 op0 = expand_normal (treeop0);
7844 if (target == 0 || modifier == EXPAND_STACK_PARM)
7845 target = gen_reg_rtx (mode);
7847 if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7848 && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7849 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7850 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7852 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7855 case FIX_TRUNC_EXPR:
7856 op0 = expand_normal (treeop0);
7857 if (target == 0 || modifier == EXPAND_STACK_PARM)
7858 target = gen_reg_rtx (mode);
7859 expand_fix (target, op0, unsignedp);
7863 op0 = expand_normal (treeop0);
7864 if (target == 0 || modifier == EXPAND_STACK_PARM)
7865 target = gen_reg_rtx (mode);
7866 /* expand_float can't figure out what to do if FROM has VOIDmode.
7867 So give it the correct mode. With -O, cse will optimize this. */
7868 if (GET_MODE (op0) == VOIDmode)
7869 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
7871 expand_float (target, op0,
7872 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7876 op0 = expand_expr (treeop0, subtarget,
7877 VOIDmode, EXPAND_NORMAL);
7878 if (modifier == EXPAND_STACK_PARM)
7880 temp = expand_unop (mode,
7881 optab_for_tree_code (NEGATE_EXPR, type,
7885 return REDUCE_BIT_FIELD (temp);
7888 op0 = expand_expr (treeop0, subtarget,
7889 VOIDmode, EXPAND_NORMAL);
7890 if (modifier == EXPAND_STACK_PARM)
7893 /* ABS_EXPR is not valid for complex arguments. */
7894 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7895 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7897 /* Unsigned abs is simply the operand. Testing here means we don't
7898 risk generating incorrect code below. */
7899 if (TYPE_UNSIGNED (type))
7902 return expand_abs (mode, op0, target, unsignedp,
7903 safe_from_p (target, treeop0, 1));
7907 target = original_target;
7909 || modifier == EXPAND_STACK_PARM
7910 || (MEM_P (target) && MEM_VOLATILE_P (target))
7911 || GET_MODE (target) != mode
7913 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7914 target = gen_reg_rtx (mode);
7915 expand_operands (treeop0, treeop1,
7916 target, &op0, &op1, EXPAND_NORMAL);
7918 /* First try to do it with a special MIN or MAX instruction.
If that does not win, use a conditional jump to select the proper
value. */
7921 this_optab = optab_for_tree_code (code, type, optab_default);
7922 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
/* At this point, a MEM target is no longer useful; we will get better
   code in a register. */
7930 if (! REG_P (target))
7931 target = gen_reg_rtx (mode);
7933 /* If op1 was placed in target, swap op0 and op1. */
7934 if (target != op0 && target == op1)
7941 /* We generate better code and avoid problems with op1 mentioning
7942 target by forcing op1 into a pseudo if it isn't a constant. */
7943 if (! CONSTANT_P (op1))
7944 op1 = force_reg (mode, op1);
7947 enum rtx_code comparison_code;
7950 if (code == MAX_EXPR)
7951 comparison_code = unsignedp ? GEU : GE;
7953 comparison_code = unsignedp ? LEU : LE;
7955 /* Canonicalize to comparisons against 0. */
7956 if (op1 == const1_rtx)
7958 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
7959 or (a != 0 ? a : 1) for unsigned.
7960 For MIN we are safe converting (a <= 1 ? a : 1)
7961 into (a <= 0 ? a : 1) */
7962 cmpop1 = const0_rtx;
7963 if (code == MAX_EXPR)
7964 comparison_code = unsignedp ? NE : GT;
7966 if (op1 == constm1_rtx && !unsignedp)
7968 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
7969 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
7970 cmpop1 = const0_rtx;
7971 if (code == MIN_EXPR)
7972 comparison_code = LT;
7974 #ifdef HAVE_conditional_move
7975 /* Use a conditional move if possible. */
7976 if (can_conditionally_move_p (mode))
7980 /* ??? Same problem as in expmed.c: emit_conditional_move
7981 forces a stack adjustment via compare_from_rtx, and we
7982 lose the stack adjustment if the sequence we are about
7983 to create is discarded. */
7984 do_pending_stack_adjust ();
7988 /* Try to emit the conditional move. */
7989 insn = emit_conditional_move (target, comparison_code,
/* If we could do the conditional move, emit the sequence,
   and return. */
7998 rtx seq = get_insns ();
/* Otherwise discard the sequence and fall back to code with
   a branch. */
8010 emit_move_insn (target, op0);
8012 temp = gen_label_rtx ();
8013 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8014 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8016 emit_move_insn (target, op1);
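/* The branchy fallback emitted just above, rendered in C for a signed
   MAX_EXPR: load op0 into the target, then overwrite it with op1 unless
   the comparison already holds. */
static int
max_fallback (int a, int b)
{
  int target = a;		/* emit_move_insn (target, op0) */
  if (!(target >= b))		/* do_compare_rtx_and_jump with GE */
    target = b;			/* emit_move_insn (target, op1) */
  return target;
}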
8021 op0 = expand_expr (treeop0, subtarget,
8022 VOIDmode, EXPAND_NORMAL);
8023 if (modifier == EXPAND_STACK_PARM)
8025 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8029 /* ??? Can optimize bitwise operations with one arg constant.
8030 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8031 and (a bitwise1 b) bitwise2 b (etc)
but that is probably not worthwhile. */
8034 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8035 boolean values when we want in all cases to compute both of them. In
8036 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8037 as actual zero-or-1 values and then bitwise anding. In cases where
8038 there cannot be any side effects, better code would be made by
8039 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8040 how to recognize those cases. */
8042 case TRUTH_AND_EXPR:
8043 code = BIT_AND_EXPR;
8048 code = BIT_IOR_EXPR;
8052 case TRUTH_XOR_EXPR:
8053 code = BIT_XOR_EXPR;
8059 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8060 || (GET_MODE_PRECISION (TYPE_MODE (type))
8061 == TYPE_PRECISION (type)));
8066 /* If this is a fixed-point operation, then we cannot use the code
below because "expand_shift" doesn't support sat/no-sat fixed-point
shifts. */
8069 if (ALL_FIXED_POINT_MODE_P (mode))
8072 if (! safe_from_p (subtarget, treeop1, 1))
8074 if (modifier == EXPAND_STACK_PARM)
8076 op0 = expand_expr (treeop0, subtarget,
8077 VOIDmode, EXPAND_NORMAL);
8078 temp = expand_shift (code, mode, op0, treeop1, target,
8080 if (code == LSHIFT_EXPR)
8081 temp = REDUCE_BIT_FIELD (temp);
8084 /* Could determine the answer when only additive constants differ. Also,
8085 the addition of one can be handled by changing the condition. */
8092 case UNORDERED_EXPR:
8100 temp = do_store_flag (ops,
8101 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8102 tmode != VOIDmode ? tmode : mode);
/* Use a compare and a jump for BLKmode comparisons, or for function
   type comparisons if HAVE_canonicalize_funcptr_for_compare. */
8110 || modifier == EXPAND_STACK_PARM
8111 || ! safe_from_p (target, treeop0, 1)
8112 || ! safe_from_p (target, treeop1, 1)
8113 /* Make sure we don't have a hard reg (such as function's return
8114 value) live across basic blocks, if not optimizing. */
8115 || (!optimize && REG_P (target)
8116 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8117 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8119 emit_move_insn (target, const0_rtx);
8121 op1 = gen_label_rtx ();
8122 jumpifnot_1 (code, treeop0, treeop1, op1);
8124 emit_move_insn (target, const1_rtx);
8129 case TRUTH_NOT_EXPR:
8130 if (modifier == EXPAND_STACK_PARM)
8132 op0 = expand_expr (treeop0, target,
8133 VOIDmode, EXPAND_NORMAL);
8134 /* The parser is careful to generate TRUTH_NOT_EXPR
8135 only with operands that are always zero or one. */
8136 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8137 target, 1, OPTAB_LIB_WIDEN);
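/* Because the operand is known to be 0 or 1, the logical negation above
   reduces to an xor with 1: */
static int
truth_not (int x)
{
  return x ^ 1;			/* valid only for x in {0, 1} */
}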
8142 /* Get the rtx code of the operands. */
8143 op0 = expand_normal (treeop0);
8144 op1 = expand_normal (treeop1);
8147 target = gen_reg_rtx (TYPE_MODE (type));
8149 /* Move the real (op0) and imaginary (op1) parts to their location. */
8150 write_complex_part (target, op0, false);
8151 write_complex_part (target, op1, true);
8155 case WIDEN_SUM_EXPR:
8157 tree oprnd0 = treeop0;
8158 tree oprnd1 = treeop1;
8160 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8161 target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8166 case REDUC_MAX_EXPR:
8167 case REDUC_MIN_EXPR:
8168 case REDUC_PLUS_EXPR:
8170 op0 = expand_normal (treeop0);
8171 this_optab = optab_for_tree_code (code, type, optab_default);
8172 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8177 case VEC_EXTRACT_EVEN_EXPR:
8178 case VEC_EXTRACT_ODD_EXPR:
8180 expand_operands (treeop0, treeop1,
8181 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8182 this_optab = optab_for_tree_code (code, type, optab_default);
8183 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8189 case VEC_INTERLEAVE_HIGH_EXPR:
8190 case VEC_INTERLEAVE_LOW_EXPR:
8192 expand_operands (treeop0, treeop1,
8193 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8194 this_optab = optab_for_tree_code (code, type, optab_default);
8195 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8201 case VEC_LSHIFT_EXPR:
8202 case VEC_RSHIFT_EXPR:
8204 target = expand_vec_shift_expr (ops, target);
8208 case VEC_UNPACK_HI_EXPR:
8209 case VEC_UNPACK_LO_EXPR:
8211 op0 = expand_normal (treeop0);
8212 this_optab = optab_for_tree_code (code, type, optab_default);
8213 temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8219 case VEC_UNPACK_FLOAT_HI_EXPR:
8220 case VEC_UNPACK_FLOAT_LO_EXPR:
8222 op0 = expand_normal (treeop0);
/* The signedness is determined from the input operand. */
8224 this_optab = optab_for_tree_code (code,
8225 TREE_TYPE (treeop0),
8227 temp = expand_widen_pattern_expr
8228 (ops, op0, NULL_RTX, NULL_RTX,
8229 target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8235 case VEC_WIDEN_MULT_HI_EXPR:
8236 case VEC_WIDEN_MULT_LO_EXPR:
8238 tree oprnd0 = treeop0;
8239 tree oprnd1 = treeop1;
8241 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8242 target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8244 gcc_assert (target);
8248 case VEC_PACK_TRUNC_EXPR:
8249 case VEC_PACK_SAT_EXPR:
8250 case VEC_PACK_FIX_TRUNC_EXPR:
8251 mode = TYPE_MODE (TREE_TYPE (treeop0));
8258 /* Here to do an ordinary binary operator. */
8260 expand_operands (treeop0, treeop1,
8261 subtarget, &op0, &op1, EXPAND_NORMAL);
8263 this_optab = optab_for_tree_code (code, type, optab_default);
8265 if (modifier == EXPAND_STACK_PARM)
8267 temp = expand_binop (mode, this_optab, op0, op1, target,
8268 unsignedp, OPTAB_LIB_WIDEN);
8270 return REDUCE_BIT_FIELD (temp);
8272 #undef REDUCE_BIT_FIELD
8275 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8276 enum expand_modifier modifier, rtx *alt_rtl)
8278 rtx op0, op1, temp, decl_rtl;
8281 enum machine_mode mode;
8282 enum tree_code code = TREE_CODE (exp);
8284 rtx subtarget, original_target;
8287 bool reduce_bit_field;
8288 location_t loc = EXPR_LOCATION (exp);
8289 struct separate_ops ops;
8290 tree treeop0, treeop1, treeop2;
8292 type = TREE_TYPE (exp);
8293 mode = TYPE_MODE (type);
8294 unsignedp = TYPE_UNSIGNED (type);
8296 treeop0 = treeop1 = treeop2 = NULL_TREE;
8297 if (!VL_EXP_CLASS_P (exp))
8298 switch (TREE_CODE_LENGTH (code))
8301 case 3: treeop2 = TREE_OPERAND (exp, 2);
8302 case 2: treeop1 = TREE_OPERAND (exp, 1);
8303 case 1: treeop0 = TREE_OPERAND (exp, 0);
8313 ignore = (target == const0_rtx
8314 || ((CONVERT_EXPR_CODE_P (code)
8315 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8316 && TREE_CODE (type) == VOID_TYPE));
8318 /* An operation in what may be a bit-field type needs the
8319 result to be reduced to the precision of the bit-field type,
8320 which is narrower than that of the type's mode. */
8321 reduce_bit_field = (!ignore
8322 && TREE_CODE (type) == INTEGER_TYPE
8323 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8325 /* If we are going to ignore this result, we need only do something
8326 if there is a side-effect somewhere in the expression. If there
8327 is, short-circuit the most common cases here. Note that we must
8328 not call expand_expr with anything but const0_rtx in case this
8329 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
8333 if (! TREE_SIDE_EFFECTS (exp))
8336 /* Ensure we reference a volatile object even if value is ignored, but
8337 don't do this if all we are doing is taking its address. */
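/* For example, given "volatile int v;", the expression statement
"v;" must still emit a load of V even though the value is unused.  */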
8338 if (TREE_THIS_VOLATILE (exp)
8339 && TREE_CODE (exp) != FUNCTION_DECL
8340 && mode != VOIDmode && mode != BLKmode
8341 && modifier != EXPAND_CONST_ADDRESS)
8343 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8345 temp = copy_to_reg (temp);
8349 if (TREE_CODE_CLASS (code) == tcc_unary
8350 || code == COMPONENT_REF || code == INDIRECT_REF)
8351 return expand_expr (treeop0, const0_rtx, VOIDmode,
8354 else if (TREE_CODE_CLASS (code) == tcc_binary
8355 || TREE_CODE_CLASS (code) == tcc_comparison
8356 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8358 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8359 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8362 else if (code == BIT_FIELD_REF)
8364 expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8365 expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8366 expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8373 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8376 /* Use subtarget as the target for operand 0 of a binary operation. */
8377 subtarget = get_subtarget (target);
8378 original_target = target;
8384 tree function = decl_function_context (exp);
8386 temp = label_rtx (exp);
8387 temp = gen_rtx_LABEL_REF (Pmode, temp);
8389 if (function != current_function_decl
8391 LABEL_REF_NONLOCAL_P (temp) = 1;
8393 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8398 /* ??? ivopts calls the expander without any preparation from
8399 out-of-ssa, so fake instructions as if this were an access to the
8400 base variable. This unnecessarily allocates a pseudo; see whether
8401 we can reuse it if partition base vars have it set already. */
8402 if (!currently_expanding_to_rtl)
8403 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL);
8405 gimple g = get_gimple_for_ssa_name (exp);
8407 return expand_expr_real_1 (gimple_assign_rhs_to_tree (g), target,
8408 tmode, modifier, NULL);
8410 decl_rtl = get_rtx_for_ssa_name (exp);
8411 exp = SSA_NAME_VAR (exp);
8412 goto expand_decl_rtl;
8416 /* If a static var's type was incomplete when the decl was written,
8417 but the type is complete now, lay out the decl now. */
8418 if (DECL_SIZE (exp) == 0
8419 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8420 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8421 layout_decl (exp, 0);
8423 /* TLS emulation hook - replace __thread vars with
8424 *__emutls_get_address (&_emutls.var). */
8425 if (! targetm.have_tls
8426 && TREE_CODE (exp) == VAR_DECL
8427 && DECL_THREAD_LOCAL_P (exp))
8429 exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
8430 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
8433 /* ... fall through ... */
8437 decl_rtl = DECL_RTL (exp);
8439 gcc_assert (decl_rtl);
8440 decl_rtl = copy_rtx (decl_rtl);
8442 /* Ensure the variable is marked as used even if it doesn't go through
8443 a parser. If it hasn't been used yet, write out an external
8445 if (! TREE_USED (exp))
8447 assemble_external (exp);
8448 TREE_USED (exp) = 1;
8451 /* Show we haven't gotten RTL for this yet. */
8454 /* Variables inherited from containing functions should have
8455 been lowered by this point. */
8456 context = decl_function_context (exp);
8457 gcc_assert (!context
8458 || context == current_function_decl
8459 || TREE_STATIC (exp)
8460 /* ??? C++ creates functions that are not TREE_STATIC. */
8461 || TREE_CODE (exp) == FUNCTION_DECL);
8463 /* This is the case of an array whose size is to be determined
8464 from its initializer, while the initializer is still being parsed.
8467 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8468 temp = validize_mem (decl_rtl);
8470 /* If DECL_RTL is memory, we are in the normal case; if the
8471 address is not valid, get the address into a register. */
8473 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8476 *alt_rtl = decl_rtl;
8477 decl_rtl = use_anchored_address (decl_rtl);
8478 if (modifier != EXPAND_CONST_ADDRESS
8479 && modifier != EXPAND_SUM
8480 && !memory_address_addr_space_p (DECL_MODE (exp),
8482 MEM_ADDR_SPACE (decl_rtl)))
8483 temp = replace_equiv_address (decl_rtl,
8484 copy_rtx (XEXP (decl_rtl, 0)));
8487 /* If we got something, return it. But first, set the alignment
8488 if the address is a register. */
8491 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8492 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8497 /* If the mode of DECL_RTL does not match that of the decl, it
8498 must be a promoted value. We return a SUBREG of the wanted mode,
8499 but mark it so that we know that it was already extended. */
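/* For example, on a target whose PROMOTE_MODE widens QImode
variables to SImode, DECL_RTL is an SImode REG and we return a
lowpart (subreg:QI (reg:SI N)) with SUBREG_PROMOTED_VAR_P set, so
consumers that widen it again can skip the redundant extension.  */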
8501 if (REG_P (decl_rtl)
8502 && GET_MODE (decl_rtl) != DECL_MODE (exp))
8504 enum machine_mode pmode;
8506 /* Get the signedness used for this variable. Ensure we get the
8507 same mode we got when the variable was declared. */
8508 pmode = promote_decl_mode (exp, &unsignedp);
8509 gcc_assert (GET_MODE (decl_rtl) == pmode);
8511 temp = gen_lowpart_SUBREG (mode, decl_rtl);
8512 SUBREG_PROMOTED_VAR_P (temp) = 1;
8513 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8520 temp = immed_double_const (TREE_INT_CST_LOW (exp),
8521 TREE_INT_CST_HIGH (exp), mode);
8527 tree tmp = NULL_TREE;
8528 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8529 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8530 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8531 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8532 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8533 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8534 return const_vector_from_tree (exp);
8535 if (GET_MODE_CLASS (mode) == MODE_INT)
8537 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8539 tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8542 tmp = build_constructor_from_list (type,
8543 TREE_VECTOR_CST_ELTS (exp));
8544 return expand_expr (tmp, ignore ? const0_rtx : target,
8549 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8552 /* If optimized, generate immediate CONST_DOUBLE
8553 which will be turned into memory by reload if necessary.
8555 We used to force a register so that loop.c could see it. But
8556 this does not allow gen_* patterns to perform optimizations with
8557 the constants. It also produces two insns in cases like "x = 1.0;".
8558 On most machines, floating-point constants are not permitted in
8559 many insns, so we'd end up copying it to a register in any case.
8561 Now, we do the copying in expand_binop, if appropriate. */
8562 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8563 TYPE_MODE (TREE_TYPE (exp)));
8566 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8567 TYPE_MODE (TREE_TYPE (exp)));
8570 /* Handle evaluating a complex constant in a CONCAT target. */
8571 if (original_target && GET_CODE (original_target) == CONCAT)
8573 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8576 rtarg = XEXP (original_target, 0);
8577 itarg = XEXP (original_target, 1);
8579 /* Move the real and imaginary parts separately. */
8580 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8581 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8584 emit_move_insn (rtarg, op0);
8586 emit_move_insn (itarg, op1);
8588 return original_target;
8591 /* ... fall through ... */
8594 temp = expand_expr_constant (exp, 1, modifier);
8596 /* temp contains a constant address.
8597 On RISC machines where a constant address isn't valid,
8598 make some insns to get that address into a register. */
8599 if (modifier != EXPAND_CONST_ADDRESS
8600 && modifier != EXPAND_INITIALIZER
8601 && modifier != EXPAND_SUM
8602 && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8603 MEM_ADDR_SPACE (temp)))
8604 return replace_equiv_address (temp,
8605 copy_rtx (XEXP (temp, 0)));
8611 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8613 if (!SAVE_EXPR_RESOLVED_P (exp))
8615 /* We can indeed still hit this case, typically via builtin
8616 expanders calling save_expr immediately before expanding
8617 something. Assume this means that we only have to deal
8618 with non-BLKmode values. */
8619 gcc_assert (GET_MODE (ret) != BLKmode);
8621 val = build_decl (EXPR_LOCATION (exp),
8622 VAR_DECL, NULL, TREE_TYPE (exp));
8623 DECL_ARTIFICIAL (val) = 1;
8624 DECL_IGNORED_P (val) = 1;
8626 TREE_OPERAND (exp, 0) = treeop0;
8627 SAVE_EXPR_RESOLVED_P (exp) = 1;
8629 if (!CONSTANT_P (ret))
8630 ret = copy_to_reg (ret);
8631 SET_DECL_RTL (val, ret);
8639 /* If we don't need the result, just ensure we evaluate any
8643 unsigned HOST_WIDE_INT idx;
8646 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8647 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8652 return expand_constructor (exp, target, modifier, false);
8654 case MISALIGNED_INDIRECT_REF:
8655 case ALIGN_INDIRECT_REF:
8658 tree exp1 = treeop0;
8659 addr_space_t as = ADDR_SPACE_GENERIC;
8661 if (modifier != EXPAND_WRITE)
8665 t = fold_read_from_constant_string (exp);
8667 return expand_expr (t, target, tmode, modifier);
8670 if (POINTER_TYPE_P (TREE_TYPE (exp1)))
8671 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1)));
8673 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
8674 op0 = memory_address_addr_space (mode, op0, as);
8676 if (code == ALIGN_INDIRECT_REF)
8678 int align = TYPE_ALIGN_UNIT (type);
8679 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
8680 op0 = memory_address_addr_space (mode, op0, as);
8683 temp = gen_rtx_MEM (mode, op0);
8685 set_mem_attributes (temp, exp, 0);
8686 set_mem_addr_space (temp, as);
8688 /* Resolve the misalignment now, so that we don't have to remember
8689 to resolve it later. Of course, this only works for reads. */
8690 if (code == MISALIGNED_INDIRECT_REF)
8695 gcc_assert (modifier == EXPAND_NORMAL
8696 || modifier == EXPAND_STACK_PARM);
8698 /* The vectorizer should have already checked the mode. */
8699 icode = optab_handler (movmisalign_optab, mode)->insn_code;
8700 gcc_assert (icode != CODE_FOR_nothing);
8702 /* We've already validated the memory, and we're creating a
8703 new pseudo destination. The predicates really can't fail. */
8704 reg = gen_reg_rtx (mode);
8706 /* Nor can the insn generator. */
8707 insn = GEN_FCN (icode) (reg, temp);
8716 case TARGET_MEM_REF:
8718 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8719 struct mem_address addr;
8721 get_address_description (exp, &addr);
8722 op0 = addr_for_mem_ref (&addr, true);
8723 op0 = memory_address_addr_space (mode, op0, as);
8724 temp = gen_rtx_MEM (mode, op0);
8725 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
8726 set_mem_addr_space (temp, as);
8733 tree array = treeop0;
8734 tree index = treeop1;
8736 /* Fold an expression like: "foo"[2].
8737 This is not done in fold so it won't happen inside &.
8738 Don't fold if this is for wide characters since it's too
8739 difficult to do correctly and this is a very rare case. */
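/* E.g. "foo"[2] is folded directly to the character constant 'o'.  */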
8741 if (modifier != EXPAND_CONST_ADDRESS
8742 && modifier != EXPAND_INITIALIZER
8743 && modifier != EXPAND_MEMORY)
8745 tree t = fold_read_from_constant_string (exp);
8748 return expand_expr (t, target, tmode, modifier);
8751 /* If this is a constant index into a constant array,
8752 just get the value from the array. Handle both the cases when
8753 we have an explicit constructor and when our operand is a variable
8754 that was declared const. */
8756 if (modifier != EXPAND_CONST_ADDRESS
8757 && modifier != EXPAND_INITIALIZER
8758 && modifier != EXPAND_MEMORY
8759 && TREE_CODE (array) == CONSTRUCTOR
8760 && ! TREE_SIDE_EFFECTS (array)
8761 && TREE_CODE (index) == INTEGER_CST)
8763 unsigned HOST_WIDE_INT ix;
8766 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8768 if (tree_int_cst_equal (field, index))
8770 if (!TREE_SIDE_EFFECTS (value))
8771 return expand_expr (fold (value), target, tmode, modifier);
8776 else if (optimize >= 1
8777 && modifier != EXPAND_CONST_ADDRESS
8778 && modifier != EXPAND_INITIALIZER
8779 && modifier != EXPAND_MEMORY
8780 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8781 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8782 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8783 && targetm.binds_local_p (array))
8785 if (TREE_CODE (index) == INTEGER_CST)
8787 tree init = DECL_INITIAL (array);
8789 if (TREE_CODE (init) == CONSTRUCTOR)
8791 unsigned HOST_WIDE_INT ix;
8794 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8796 if (tree_int_cst_equal (field, index))
8798 if (TREE_SIDE_EFFECTS (value))
8801 if (TREE_CODE (value) == CONSTRUCTOR)
8803 /* If VALUE is a CONSTRUCTOR, this
8804 optimization is only useful if
8805 this doesn't store the CONSTRUCTOR
8806 into memory. If it does, it is more
8807 efficient to just load the data from
8808 the array directly. */
8809 rtx ret = expand_constructor (value, target,
8811 if (ret == NULL_RTX)
8815 return expand_expr (fold (value), target, tmode,
8819 else if (TREE_CODE (init) == STRING_CST)
8821 tree index1 = index;
8822 tree low_bound = array_ref_low_bound (exp);
8823 index1 = fold_convert_loc (loc, sizetype,
8826 /* Optimize the special case of a zero lower bound.
8828 We convert the low_bound to sizetype to avoid some problems
8829 with constant folding. (E.g. suppose the lower bound is 1,
8830 and its mode is QI. Without the conversion, (ARRAY
8831 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8832 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8834 if (! integer_zerop (low_bound))
8835 index1 = size_diffop_loc (loc, index1,
8836 fold_convert_loc (loc, sizetype,
8839 if (0 > compare_tree_int (index1,
8840 TREE_STRING_LENGTH (init)))
8842 tree type = TREE_TYPE (TREE_TYPE (init));
8843 enum machine_mode mode = TYPE_MODE (type);
8845 if (GET_MODE_CLASS (mode) == MODE_INT
8846 && GET_MODE_SIZE (mode) == 1)
8847 return gen_int_mode (TREE_STRING_POINTER (init)
8848 [TREE_INT_CST_LOW (index1)],
8855 goto normal_inner_ref;
8858 /* If the operand is a CONSTRUCTOR, we can just extract the
8859 appropriate field if it is present. */
8860 if (TREE_CODE (treeop0) == CONSTRUCTOR)
8862 unsigned HOST_WIDE_INT idx;
8865 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
8867 if (field == treeop1
8868 /* We can normally use the value of the field in the
8869 CONSTRUCTOR. However, if this is a bitfield in
8870 an integral mode that we can fit in a HOST_WIDE_INT,
8871 we must mask only the number of bits in the bitfield,
8872 since this is done implicitly by the constructor. If
8873 the bitfield does not meet either of those conditions,
8874 we can't do this optimization. */
8875 && (! DECL_BIT_FIELD (field)
8876 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
8877 && (GET_MODE_BITSIZE (DECL_MODE (field))
8878 <= HOST_BITS_PER_WIDE_INT))))
8880 if (DECL_BIT_FIELD (field)
8881 && modifier == EXPAND_STACK_PARM)
8883 op0 = expand_expr (value, target, tmode, modifier);
8884 if (DECL_BIT_FIELD (field))
8886 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
8887 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
8889 if (TYPE_UNSIGNED (TREE_TYPE (field)))
8891 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
8892 op0 = expand_and (imode, op0, op1, target);
8897 = build_int_cst (NULL_TREE,
8898 GET_MODE_BITSIZE (imode) - bitsize);
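/* Sign-extend the BITSIZE-bit value: shift it to the top of IMODE,
then arithmetic-shift it back down, replicating the bitfield's
sign bit into the upper bits.  */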
8900 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
8902 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
8910 goto normal_inner_ref;
8913 case ARRAY_RANGE_REF:
8916 enum machine_mode mode1, mode2;
8917 HOST_WIDE_INT bitsize, bitpos;
8919 int volatilep = 0, must_force_mem;
8920 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8921 &mode1, &unsignedp, &volatilep, true);
8922 rtx orig_op0, memloc;
8924 /* If we got back the original object, something is wrong. Perhaps
8925 we are evaluating an expression too early. In any event, don't
8926 infinitely recurse. */
8927 gcc_assert (tem != exp);
8929 /* If TEM's type is a union of variable size, pass TARGET to the inner
8930 computation, since it will need a temporary and TARGET is known
8931 to suffice. This occurs in unchecked conversion in Ada. */
8934 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8935 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8937 && modifier != EXPAND_STACK_PARM
8938 ? target : NULL_RTX),
8940 (modifier == EXPAND_INITIALIZER
8941 || modifier == EXPAND_CONST_ADDRESS
8942 || modifier == EXPAND_STACK_PARM)
8943 ? modifier : EXPAND_NORMAL);
8946 = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
8948 /* If we have either an offset, a BLKmode result, or a reference
8949 outside the underlying object, we must force it to memory.
8950 Such a case can occur in Ada if we have unchecked conversion
8951 of an expression from a scalar type to an aggregate type or
8952 for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
8953 passed a partially uninitialized object or a view-conversion
8954 to a larger size. */
8955 must_force_mem = (offset
8957 || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
8959 /* Handle CONCAT first. */
8960 if (GET_CODE (op0) == CONCAT && !must_force_mem)
8963 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
8966 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8969 op0 = XEXP (op0, 0);
8970 mode2 = GET_MODE (op0);
8972 else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8973 && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
8977 op0 = XEXP (op0, 1);
8979 mode2 = GET_MODE (op0);
8982 /* Otherwise force into memory. */
8986 /* If this is a constant, put it in a register if it is a legitimate
8987 constant and we don't need a memory reference. */
8988 if (CONSTANT_P (op0)
8990 && LEGITIMATE_CONSTANT_P (op0)
8992 op0 = force_reg (mode2, op0);
8994 /* Otherwise, if this is a constant, try to force it to the constant
8995 pool. Note that back-ends, e.g. MIPS, may refuse to do so if it
8996 is a legitimate constant. */
8997 else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
8998 op0 = validize_mem (memloc);
9000 /* Otherwise, if this is a constant or the object is not in memory
9001 and needs to be, put it there. */
9002 else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9004 tree nt = build_qualified_type (TREE_TYPE (tem),
9005 (TYPE_QUALS (TREE_TYPE (tem))
9006 | TYPE_QUAL_CONST));
9007 memloc = assign_temp (nt, 1, 1, 1);
9008 emit_move_insn (memloc, op0);
9014 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9017 gcc_assert (MEM_P (op0));
9019 #ifdef POINTERS_EXTEND_UNSIGNED
9020 if (GET_MODE (offset_rtx) != Pmode)
9021 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
9023 if (GET_MODE (offset_rtx) != ptr_mode)
9024 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9027 if (GET_MODE (op0) == BLKmode
9028 /* A constant address in OP0 can have VOIDmode; we must
9029 not try to call force_reg in that case. */
9030 && GET_MODE (XEXP (op0, 0)) != VOIDmode
9032 && (bitpos % bitsize) == 0
9033 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9034 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9036 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9040 op0 = offset_address (op0, offset_rtx,
9041 highest_pow2_factor (offset));
9044 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9045 record its alignment as BIGGEST_ALIGNMENT. */
9046 if (MEM_P (op0) && bitpos == 0 && offset != 0
9047 && is_aligning_offset (offset, tem))
9048 set_mem_align (op0, BIGGEST_ALIGNMENT);
9050 /* Don't forget about volatility even if this is a bitfield. */
9051 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9053 if (op0 == orig_op0)
9054 op0 = copy_rtx (op0);
9056 MEM_VOLATILE_P (op0) = 1;
9059 /* In cases where an aligned union has an unaligned object
9060 as a field, we might be extracting a BLKmode value from
9061 an integer-mode (e.g., SImode) object. Handle this case
9062 by doing the extract into an object as wide as the field
9063 (which we know to be the width of a basic mode), then
9064 storing into memory, and changing the mode to BLKmode. */
9065 if (mode1 == VOIDmode
9066 || REG_P (op0) || GET_CODE (op0) == SUBREG
9067 || (mode1 != BLKmode && ! direct_load[(int) mode1]
9068 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9069 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9070 && modifier != EXPAND_CONST_ADDRESS
9071 && modifier != EXPAND_INITIALIZER)
9072 /* If the field isn't aligned enough to fetch as a memref,
9073 fetch it as a bit field. */
9074 || (mode1 != BLKmode
9075 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9076 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9078 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9079 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9080 && ((modifier == EXPAND_CONST_ADDRESS
9081 || modifier == EXPAND_INITIALIZER)
9083 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9084 || (bitpos % BITS_PER_UNIT != 0)))
9085 /* If the type and the field are a constant size and the
9086 size of the type isn't the same size as the bitfield,
9087 we must use bitfield operations. */
9089 && TYPE_SIZE (TREE_TYPE (exp))
9090 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9091 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9094 enum machine_mode ext_mode = mode;
9096 if (ext_mode == BLKmode
9097 && ! (target != 0 && MEM_P (op0)
9099 && bitpos % BITS_PER_UNIT == 0))
9100 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9102 if (ext_mode == BLKmode)
9105 target = assign_temp (type, 0, 1, 1);
9110 /* In this case, BITPOS must start at a byte boundary and
9111 TARGET, if specified, must be a MEM. */
9112 gcc_assert (MEM_P (op0)
9113 && (!target || MEM_P (target))
9114 && !(bitpos % BITS_PER_UNIT));
9116 emit_block_move (target,
9117 adjust_address (op0, VOIDmode,
9118 bitpos / BITS_PER_UNIT),
9119 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9121 (modifier == EXPAND_STACK_PARM
9122 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9127 op0 = validize_mem (op0);
9129 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9130 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9132 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9133 (modifier == EXPAND_STACK_PARM
9134 ? NULL_RTX : target),
9135 ext_mode, ext_mode);
9137 /* If the result is a record type and BITSIZE is narrower than
9138 the mode of OP0, an integral mode, and this is a big endian
9139 machine, we must put the field into the high-order bits. */
9140 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9141 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9142 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9143 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9144 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9148 /* If the result type is BLKmode, store the data into a temporary
9149 of the appropriate type, but with the mode corresponding to the
9150 mode for the data we have (op0's mode). It's tempting to make
9151 this a constant type, since we know it's only being stored once,
9152 but that can cause problems if we are taking the address of this
9153 COMPONENT_REF because the MEM of any reference via that address
9154 will have flags corresponding to the type, which will not
9155 necessarily be constant. */
9156 if (mode == BLKmode)
9158 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9161 /* If the reference doesn't use the alias set of its type,
9162 we cannot create the temporary using that type. */
9163 if (component_uses_parent_alias_set (exp))
9165 new_rtx = assign_stack_local (ext_mode, size, 0);
9166 set_mem_alias_set (new_rtx, get_alias_set (exp));
9169 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9171 emit_move_insn (new_rtx, op0);
9172 op0 = copy_rtx (new_rtx);
9173 PUT_MODE (op0, BLKmode);
9174 set_mem_attributes (op0, exp, 1);
9180 /* If the result is BLKmode, use that to access the object
9182 if (mode == BLKmode)
9185 /* Get a reference to just this component. */
9186 if (modifier == EXPAND_CONST_ADDRESS
9187 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9188 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9190 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9192 if (op0 == orig_op0)
9193 op0 = copy_rtx (op0);
9195 set_mem_attributes (op0, exp, 0);
9196 if (REG_P (XEXP (op0, 0)))
9197 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9199 MEM_VOLATILE_P (op0) |= volatilep;
9200 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9201 || modifier == EXPAND_CONST_ADDRESS
9202 || modifier == EXPAND_INITIALIZER)
9204 else if (target == 0)
9205 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9207 convert_move (target, op0, unsignedp);
9212 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9215 /* All valid uses of __builtin_va_arg_pack () are removed during
9217 if (CALL_EXPR_VA_ARG_PACK (exp))
9218 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9220 tree fndecl = get_callee_fndecl (exp), attr;
9223 && (attr = lookup_attribute ("error",
9224 DECL_ATTRIBUTES (fndecl))) != NULL)
9225 error ("%Kcall to %qs declared with attribute error: %s",
9226 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9227 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9229 && (attr = lookup_attribute ("warning",
9230 DECL_ATTRIBUTES (fndecl))) != NULL)
9231 warning_at (tree_nonartificial_location (exp),
9232 0, "%Kcall to %qs declared with attribute warning: %s",
9233 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9234 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9236 /* Check for a built-in function. */
9237 if (fndecl && DECL_BUILT_IN (fndecl))
9239 gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9240 return expand_builtin (exp, target, subtarget, tmode, ignore);
9243 return expand_call (exp, target, ignore);
9245 case VIEW_CONVERT_EXPR:
9248 /* If we are converting to BLKmode, try to avoid an intermediate
9249 temporary by fetching an inner memory reference. */
9251 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9252 && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9253 && handled_component_p (treeop0))
9255 enum machine_mode mode1;
9256 HOST_WIDE_INT bitsize, bitpos;
9261 = get_inner_reference (treeop0, &bitsize, &bitpos,
9262 &offset, &mode1, &unsignedp, &volatilep,
9266 /* ??? We should work harder and deal with non-zero offsets. */
9268 && (bitpos % BITS_PER_UNIT) == 0
9270 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9272 /* See the normal_inner_ref case for the rationale. */
9275 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9276 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9278 && modifier != EXPAND_STACK_PARM
9279 ? target : NULL_RTX),
9281 (modifier == EXPAND_INITIALIZER
9282 || modifier == EXPAND_CONST_ADDRESS
9283 || modifier == EXPAND_STACK_PARM)
9284 ? modifier : EXPAND_NORMAL);
9286 if (MEM_P (orig_op0))
9290 /* Get a reference to just this component. */
9291 if (modifier == EXPAND_CONST_ADDRESS
9292 || modifier == EXPAND_SUM
9293 || modifier == EXPAND_INITIALIZER)
9294 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9296 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9298 if (op0 == orig_op0)
9299 op0 = copy_rtx (op0);
9301 set_mem_attributes (op0, treeop0, 0);
9302 if (REG_P (XEXP (op0, 0)))
9303 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9305 MEM_VOLATILE_P (op0) |= volatilep;
9311 op0 = expand_expr (treeop0,
9312 NULL_RTX, VOIDmode, modifier);
9314 /* If the input and output modes are both the same, we are done. */
9315 if (mode == GET_MODE (op0))
9317 /* If neither mode is BLKmode, and both modes are the same size
9318 then we can use gen_lowpart. */
9319 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9320 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9321 && !COMPLEX_MODE_P (GET_MODE (op0)))
9323 if (GET_CODE (op0) == SUBREG)
9324 op0 = force_reg (GET_MODE (op0), op0);
9325 op0 = gen_lowpart (mode, op0);
9327 /* If both modes are integral, then we can convert from one to the
9329 else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
9330 op0 = convert_modes (mode, GET_MODE (op0), op0,
9331 TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9332 /* As a last resort, spill op0 to memory, and reload it in a
9334 else if (!MEM_P (op0))
9336 /* If the operand is not a MEM, force it into memory. Since we
9337 are going to be changing the mode of the MEM, don't call
9338 force_const_mem for constants because we don't allow pool
9339 constants to change mode. */
9340 tree inner_type = TREE_TYPE (treeop0);
9342 gcc_assert (!TREE_ADDRESSABLE (exp));
9344 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9346 = assign_stack_temp_for_type
9347 (TYPE_MODE (inner_type),
9348 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9350 emit_move_insn (target, op0);
9354 /* At this point, OP0 is in the correct mode. If the output type is
9355 such that the operand is known to be aligned, indicate that it is.
9356 Otherwise, we need only be concerned about alignment for non-BLKmode
9360 op0 = copy_rtx (op0);
9362 if (TYPE_ALIGN_OK (type))
9363 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9364 else if (STRICT_ALIGNMENT
9366 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9368 tree inner_type = TREE_TYPE (treeop0);
9369 HOST_WIDE_INT temp_size
9370 = MAX (int_size_in_bytes (inner_type),
9371 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9373 = assign_stack_temp_for_type (mode, temp_size, 0, type);
9374 rtx new_with_op0_mode
9375 = adjust_address (new_rtx, GET_MODE (op0), 0);
9377 gcc_assert (!TREE_ADDRESSABLE (exp));
9379 if (GET_MODE (op0) == BLKmode)
9380 emit_block_move (new_with_op0_mode, op0,
9381 GEN_INT (GET_MODE_SIZE (mode)),
9382 (modifier == EXPAND_STACK_PARM
9383 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9385 emit_move_insn (new_with_op0_mode, op0);
9390 op0 = adjust_address (op0, mode, 0);
9395 /* Use a compare and a jump for BLKmode comparisons, or for function
9396 type comparisons if HAVE_canonicalize_funcptr_for_compare. */
9398 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9399 are occasionally created by folding during expansion. */
9400 case TRUTH_ANDIF_EXPR:
9401 case TRUTH_ORIF_EXPR:
9404 || modifier == EXPAND_STACK_PARM
9405 || ! safe_from_p (target, treeop0, 1)
9406 || ! safe_from_p (target, treeop1, 1)
9407 /* Make sure we don't have a hard reg (such as function's return
9408 value) live across basic blocks, if not optimizing. */
9409 || (!optimize && REG_P (target)
9410 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9411 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9414 emit_move_insn (target, const0_rtx);
9416 op1 = gen_label_rtx ();
9417 jumpifnot_1 (code, treeop0, treeop1, op1);
9420 emit_move_insn (target, const1_rtx);
9423 return ignore ? const0_rtx : target;
9425 case STATEMENT_LIST:
9427 tree_stmt_iterator iter;
9429 gcc_assert (ignore);
9431 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9432 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9437 /* A COND_EXPR with its type being VOID_TYPE represents a
9438 conditional jump and is handled in
9439 expand_gimple_cond_expr. */
9440 gcc_assert (!VOID_TYPE_P (type));
9442 /* Note that COND_EXPRs whose type is a structure or union
9443 are required to be constructed to contain assignments of
9444 a temporary variable, so that we can evaluate them here
9445 for side effect only. If type is void, we must do likewise. */
9447 gcc_assert (!TREE_ADDRESSABLE (type)
9449 && TREE_TYPE (treeop1) != void_type_node
9450 && TREE_TYPE (treeop2) != void_type_node);
9452 /* If we are not to produce a result, we have no target. Otherwise,
9453 if a target was specified, use it; it will not be used as an
9454 intermediate target unless it is safe. If no target, use a
9457 if (modifier != EXPAND_STACK_PARM
9459 && safe_from_p (original_target, treeop0, 1)
9460 && GET_MODE (original_target) == mode
9461 #ifdef HAVE_conditional_move
9462 && (! can_conditionally_move_p (mode)
9463 || REG_P (original_target))
9465 && !MEM_P (original_target))
9466 temp = original_target;
9468 temp = assign_temp (type, 0, 0, 1);
9470 do_pending_stack_adjust ();
9472 op0 = gen_label_rtx ();
9473 op1 = gen_label_rtx ();
9474 jumpifnot (treeop0, op0);
9475 store_expr (treeop1, temp,
9476 modifier == EXPAND_STACK_PARM,
9479 emit_jump_insn (gen_jump (op1));
9482 store_expr (treeop2, temp,
9483 modifier == EXPAND_STACK_PARM,
9491 target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9498 gcc_assert (ignore);
9500 /* Check for |= or &= of a bitfield of size one into another bitfield
9501 of size one. In this case, (unless we need the result of the
9502 assignment) we can do this more efficiently with a
9503 test followed by an assignment, if necessary.
9505 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9506 things change so we do, this code should be enhanced to
9508 if (TREE_CODE (lhs) == COMPONENT_REF
9509 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9510 || TREE_CODE (rhs) == BIT_AND_EXPR)
9511 && TREE_OPERAND (rhs, 0) == lhs
9512 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9513 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9514 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9516 rtx label = gen_label_rtx ();
9517 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9518 do_jump (TREE_OPERAND (rhs, 1),
9521 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9522 MOVE_NONTEMPORAL (exp));
9523 do_pending_stack_adjust ();
9528 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9533 return expand_expr_addr_expr (exp, target, tmode, modifier);
9536 op0 = expand_normal (treeop0);
9537 return read_complex_part (op0, false);
9540 op0 = expand_normal (treeop0);
9541 return read_complex_part (op0, true);
9548 /* Expanded in cfgexpand.c. */
9551 case TRY_CATCH_EXPR:
9553 case EH_FILTER_EXPR:
9554 case TRY_FINALLY_EXPR:
9555 /* Lowered by tree-eh.c. */
9558 case WITH_CLEANUP_EXPR:
9559 case CLEANUP_POINT_EXPR:
9561 case CASE_LABEL_EXPR:
9567 case PREINCREMENT_EXPR:
9568 case PREDECREMENT_EXPR:
9569 case POSTINCREMENT_EXPR:
9570 case POSTDECREMENT_EXPR:
9573 /* Lowered by gimplify.c. */
9577 /* Function descriptors are not valid except as
9578 initialization constants, and should not be expanded. */
9581 case WITH_SIZE_EXPR:
9582 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9583 have pulled out the size to use in whatever context it needed. */
9584 return expand_expr_real (treeop0, original_target, tmode,
9587 case REALIGN_LOAD_EXPR:
9589 tree oprnd0 = treeop0;
9590 tree oprnd1 = treeop1;
9591 tree oprnd2 = treeop2;
9594 this_optab = optab_for_tree_code (code, type, optab_default);
9595 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9596 op2 = expand_normal (oprnd2);
9597 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9605 tree oprnd0 = treeop0;
9606 tree oprnd1 = treeop1;
9607 tree oprnd2 = treeop2;
9610 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9611 op2 = expand_normal (oprnd2);
9612 target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9617 case COMPOUND_LITERAL_EXPR:
9619 /* Initialize the anonymous variable declared in the compound
9620 literal, then return the variable. */
9621 tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9623 /* Create RTL for this variable. */
9624 if (!DECL_RTL_SET_P (decl))
9626 if (DECL_HARD_REGISTER (decl))
9627 /* The user specified an assembler name for this variable.
9629 rest_of_decl_compilation (decl, 0, 0);
9634 return expand_expr_real (decl, original_target, tmode,
9639 return expand_expr_real_2 (&ops, target, tmode, modifier);
9643 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9644 signedness of TYPE), possibly returning the result in TARGET. */
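/* For instance, reducing to a 5-bit unsigned type held in 32-bit
SImode masks the value with 0x1f, while reducing to a 5-bit signed
type shifts it left by 27 and arithmetic-shifts it right by 27 so
the upper 27 bits become copies of the sign bit.  */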
9646 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9648 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9649 if (target && GET_MODE (target) != GET_MODE (exp))
9651 /* For constant values, reduce using build_int_cst_type. */
9652 if (CONST_INT_P (exp))
9654 HOST_WIDE_INT value = INTVAL (exp);
9655 tree t = build_int_cst_type (type, value);
9656 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9658 else if (TYPE_UNSIGNED (type))
9661 if (prec < HOST_BITS_PER_WIDE_INT)
9662 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9665 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9666 ((unsigned HOST_WIDE_INT) 1
9667 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9669 return expand_and (GET_MODE (exp), exp, mask, target);
9673 tree count = build_int_cst (NULL_TREE,
9674 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9675 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9676 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9680 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9681 when applied to the address of EXP produces an address known to be
9682 aligned more than BIGGEST_ALIGNMENT. */
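/* The pattern recognized below is what rounding an address up to an
alignment boundary produces: OFFSET must be (-&EXP) & C for some
constant C that is one less than a power of 2, so that, e.g. with
C == 255, &EXP + OFFSET is the next 256-byte boundary at or above
&EXP.  */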
9685 is_aligning_offset (const_tree offset, const_tree exp)
9687 /* Strip off any conversions. */
9688 while (CONVERT_EXPR_P (offset))
9689 offset = TREE_OPERAND (offset, 0);
9691 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9692 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9693 if (TREE_CODE (offset) != BIT_AND_EXPR
9694 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9695 || compare_tree_int (TREE_OPERAND (offset, 1),
9696 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9697 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9700 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9701 It must be NEGATE_EXPR. Then strip any more conversions. */
9702 offset = TREE_OPERAND (offset, 0);
9703 while (CONVERT_EXPR_P (offset))
9704 offset = TREE_OPERAND (offset, 0);
9706 if (TREE_CODE (offset) != NEGATE_EXPR)
9709 offset = TREE_OPERAND (offset, 0);
9710 while (CONVERT_EXPR_P (offset))
9711 offset = TREE_OPERAND (offset, 0);
9713 /* This must now be the address of EXP. */
9714 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9717 /* Return the tree node if ARG corresponds to a string constant, or zero
9718 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9719 in bytes within the string that ARG is accessing. The type of the
9720 offset will be `sizetype'. */
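/* For example, for ARG == &"hello"[2] this returns the STRING_CST
"hello" and sets *PTR_OFFSET to (sizetype) 2.  */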
9723 string_constant (tree arg, tree *ptr_offset)
9725 tree array, offset, lower_bound;
9728 if (TREE_CODE (arg) == ADDR_EXPR)
9730 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9732 *ptr_offset = size_zero_node;
9733 return TREE_OPERAND (arg, 0);
9735 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9737 array = TREE_OPERAND (arg, 0);
9738 offset = size_zero_node;
9740 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9742 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9743 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9744 if (TREE_CODE (array) != STRING_CST
9745 && TREE_CODE (array) != VAR_DECL)
9748 /* Check if the array has a nonzero lower bound. */
9749 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9750 if (!integer_zerop (lower_bound))
9752 /* If the offset and base aren't both constants, return 0. */
9753 if (TREE_CODE (lower_bound) != INTEGER_CST)
9755 if (TREE_CODE (offset) != INTEGER_CST)
9757 /* Adjust offset by the lower bound. */
9758 offset = size_diffop (fold_convert (sizetype, offset),
9759 fold_convert (sizetype, lower_bound));
9765 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9767 tree arg0 = TREE_OPERAND (arg, 0);
9768 tree arg1 = TREE_OPERAND (arg, 1);
9773 if (TREE_CODE (arg0) == ADDR_EXPR
9774 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9775 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9777 array = TREE_OPERAND (arg0, 0);
9780 else if (TREE_CODE (arg1) == ADDR_EXPR
9781 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9782 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9784 array = TREE_OPERAND (arg1, 0);
9793 if (TREE_CODE (array) == STRING_CST)
9795 *ptr_offset = fold_convert (sizetype, offset);
9798 else if (TREE_CODE (array) == VAR_DECL)
9802 /* Variables initialized to string literals can be handled too. */
9803 if (DECL_INITIAL (array) == NULL_TREE
9804 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9807 /* The array must be read-only and non-volatile, and must bind locally. */
9808 if (! TREE_READONLY (array)
9809 || TREE_SIDE_EFFECTS (array)
9810 || ! targetm.binds_local_p (array))
9813 /* Avoid const char foo[4] = "abcde"; */
9814 if (DECL_SIZE_UNIT (array) == NULL_TREE
9815 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9816 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9817 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9820 /* If variable is bigger than the string literal, OFFSET must be constant
9821 and inside of the bounds of the string literal. */
9822 offset = fold_convert (sizetype, offset);
9823 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9824 && (! host_integerp (offset, 1)
9825 || compare_tree_int (offset, length) >= 0))
9828 *ptr_offset = offset;
9829 return DECL_INITIAL (array);
9835 /* Generate code to calculate OPS, an exploded expression,
9836 using a store-flag instruction and return an rtx for the result.
9837 OPS reflects a comparison.
9839 If TARGET is nonzero, store the result there if convenient.
9841 Return zero if there is no suitable set-flag instruction
9842 available on this machine.
9844 Once expand_expr has been called on the arguments of the comparison,
9845 we are committed to doing the store flag, since it is not safe to
9846 re-evaluate the expression. We emit the store-flag insn by calling
9847 emit_store_flag, but only expand the arguments if we have a reason
9848 to believe that emit_store_flag will be successful. If we think that
9849 it will, but it isn't, we have to simulate the store-flag with a
9850 set/jump/set sequence. */
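/* The simulated sequence emitted by emit_store_flag_force is roughly
"target = 1; if (op0 <cond> op1) goto done; target = 0; done:",
which materializes the flag without a native scc instruction.  */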
9853 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
9856 tree arg0, arg1, type;
9858 enum machine_mode operand_mode;
9861 rtx subtarget = target;
9862 location_t loc = ops->location;
9867 /* Don't crash if the comparison was erroneous. */
9868 if (arg0 == error_mark_node || arg1 == error_mark_node)
9871 type = TREE_TYPE (arg0);
9872 operand_mode = TYPE_MODE (type);
9873 unsignedp = TYPE_UNSIGNED (type);
9875 /* We won't bother with BLKmode store-flag operations because it would mean
9876 passing a lot of information to emit_store_flag. */
9877 if (operand_mode == BLKmode)
9880 /* We won't bother with store-flag operations involving function pointers
9881 when function pointers must be canonicalized before comparisons. */
9882 #ifdef HAVE_canonicalize_funcptr_for_compare
9883 if (HAVE_canonicalize_funcptr_for_compare
9884 && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
9885 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
9887 || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
9888 && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
9889 == FUNCTION_TYPE))))
9896 /* Get the rtx comparison code to use. We know that EXP is a comparison
9897 operation of some type. Some comparisons against 1 and -1 can be
9898 converted to comparisons with zero. Do so here so that the tests
9899 below will be aware that we have a comparison with zero. These
9900 tests will not catch constants in the first operand, but constants
9901 are rarely passed as the first operand. */
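/* For example, a signed "x < 1" becomes "x <= 0" and a signed
"x > -1" becomes "x >= 0", so the cheaper comparisons against zero
can be used.  */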
9912 if (integer_onep (arg1))
9913 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9915 code = unsignedp ? LTU : LT;
9918 if (! unsignedp && integer_all_onesp (arg1))
9919 arg1 = integer_zero_node, code = LT;
9921 code = unsignedp ? LEU : LE;
9924 if (! unsignedp && integer_all_onesp (arg1))
9925 arg1 = integer_zero_node, code = GE;
9927 code = unsignedp ? GTU : GT;
9930 if (integer_onep (arg1))
9931 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9933 code = unsignedp ? GEU : GE;
9936 case UNORDERED_EXPR:
9965 /* Put a constant second. */
9966 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9967 || TREE_CODE (arg0) == FIXED_CST)
9969 tem = arg0; arg0 = arg1; arg1 = tem;
9970 code = swap_condition (code);
9973 /* If this is an equality or inequality test of a single bit, we can
9974 do this by shifting the bit being tested to the low-order bit and
9975 masking the result with the constant 1. If the condition was EQ,
9976 we xor it with 1. This does not require an scc insn and is faster
9977 than an scc insn even if we have it.
9979 The code to make this transformation was moved into fold_single_bit_test,
9980 so we just call into the folder and expand its result. */
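/* For example, "(x & 4) != 0" becomes "(x >> 2) & 1", and
"(x & 4) == 0" becomes "((x >> 2) & 1) ^ 1".  */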
9982 if ((code == NE || code == EQ)
9983 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9984 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9986 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9987 return expand_expr (fold_single_bit_test (loc,
9988 code == NE ? NE_EXPR : EQ_EXPR,
9990 target, VOIDmode, EXPAND_NORMAL);
9993 if (! get_subtarget (target)
9994 || GET_MODE (subtarget) != operand_mode)
9997 expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10000 target = gen_reg_rtx (mode);
10002 /* Try a cstore if possible. */
10003 return emit_store_flag_force (target, code, op0, op1,
10004 operand_mode, unsignedp, 1);
10008 /* Stubs in case we haven't got a casesi insn. */
10009 #ifndef HAVE_casesi
10010 # define HAVE_casesi 0
10011 # define gen_casesi(a, b, c, d, e) (0)
10012 # define CODE_FOR_casesi CODE_FOR_nothing
10015 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10016 0 otherwise (i.e. if there is no casesi instruction). */
10018 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10019 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10020 rtx fallback_label ATTRIBUTE_UNUSED)
10022 enum machine_mode index_mode = SImode;
10023 int index_bits = GET_MODE_BITSIZE (index_mode);
10024 rtx op1, op2, index;
10025 enum machine_mode op_mode;
10030 /* Convert the index to SImode. */
10031 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10033 enum machine_mode omode = TYPE_MODE (index_type);
10034 rtx rangertx = expand_normal (range);
10036 /* We must handle the endpoints in the original mode. */
10037 index_expr = build2 (MINUS_EXPR, index_type,
10038 index_expr, minval);
10039 minval = integer_zero_node;
10040 index = expand_normal (index_expr);
10042 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10043 omode, 1, default_label);
10044 /* Now we can safely truncate. */
10045 index = convert_to_mode (index_mode, index, 0);
10049 if (TYPE_MODE (index_type) != index_mode)
10051 index_type = lang_hooks.types.type_for_size (index_bits, 0);
10052 index_expr = fold_convert (index_type, index_expr);
10055 index = expand_normal (index_expr);
10058 do_pending_stack_adjust ();
10060 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10061 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10063 index = copy_to_mode_reg (op_mode, index);
10065 op1 = expand_normal (minval);
10067 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10068 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10069 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10070 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10072 op1 = copy_to_mode_reg (op_mode, op1);
10074 op2 = expand_normal (range);
10076 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10077 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10078 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10079 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10081 op2 = copy_to_mode_reg (op_mode, op2);
10083 emit_jump_insn (gen_casesi (index, op1, op2,
10084 table_label, !default_label
10085 ? fallback_label : default_label));
10089 /* Attempt to generate a tablejump instruction; same concept. */
10090 #ifndef HAVE_tablejump
10091 #define HAVE_tablejump 0
10092 #define gen_tablejump(x, y) (0)
10095 /* Subroutine of the next function.
10097 INDEX is the value being switched on, with the lowest value
10098 in the table already subtracted.
10099 MODE is its expected mode (needed if INDEX is constant).
10100 RANGE is the length of the jump table.
10101 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10103 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10104 index value is out of range. */
10107 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10112 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10113 cfun->cfg->max_jumptable_ents = INTVAL (range);
10115 /* Do an unsigned comparison (in the proper mode) between the index
10116 expression and the value which represents the length of the range.
10117 Since we just finished subtracting the lower bound of the range
10118 from the index expression, this comparison allows us to simultaneously
10119 check that the original index expression value is both greater than
10120 or equal to the minimum value of the range and less than or equal to
10121 the maximum value of the range. */
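/* E.g. for case labels in [10, 20], INDEX already holds i - 10, so
the single unsigned comparison (unsigned) (i - 10) > 10 catches both
i < 10 (which wraps to a large value) and i > 20.  */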
10124 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10127 /* If index is in range, it must fit in Pmode.
10128 Convert to Pmode so we can index with it. */
10130 index = convert_to_mode (Pmode, index, 1);
10132 /* Don't let a MEM slip through, because then INDEX that comes
10133 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10134 and break_out_memory_refs will go to work on it and mess it up. */
10135 #ifdef PIC_CASE_VECTOR_ADDRESS
10136 if (flag_pic && !REG_P (index))
10137 index = copy_to_mode_reg (Pmode, index);
10140 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10141 GET_MODE_SIZE, because this indicates how large insns are. The other
10142 uses should all be Pmode, because they are addresses. This code
10143 could fail if addresses and insns are not the same size. */
10144 index = gen_rtx_PLUS (Pmode,
10145 gen_rtx_MULT (Pmode, index,
10146 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10147 gen_rtx_LABEL_REF (Pmode, table_label));
10148 #ifdef PIC_CASE_VECTOR_ADDRESS
10150 index = PIC_CASE_VECTOR_ADDRESS (index);
10153 index = memory_address (CASE_VECTOR_MODE, index);
10154 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10155 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10156 convert_move (temp, vector, 0);
10158 emit_jump_insn (gen_tablejump (temp, table_label));
10160 /* If we are generating PIC code or if the table is PC-relative, the
10161 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10162 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10167 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10168 rtx table_label, rtx default_label)
10172 if (! HAVE_tablejump)
10175 index_expr = fold_build2 (MINUS_EXPR, index_type,
10176 fold_convert (index_type, index_expr),
10177 fold_convert (index_type, minval));
10178 index = expand_normal (index_expr);
10179 do_pending_stack_adjust ();
10181 do_tablejump (index, TYPE_MODE (index_type),
10182 convert_modes (TYPE_MODE (index_type),
10183 TYPE_MODE (TREE_TYPE (range)),
10184 expand_normal (range),
10185 TYPE_UNSIGNED (TREE_TYPE (range))),
10186 table_label, default_label);
10190 /* Nonzero if the mode is a valid vector mode for this architecture.
10191 This returns nonzero even if there is no hardware support for the
10192 vector mode, but we can emulate with narrower modes. */
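/* E.g. V4SImode counts as valid on a target with no vector unit as
long as SImode itself is supported, since the generic vector
lowering pass can open-code the operations one element at a time.  */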
10195 vector_mode_valid_p (enum machine_mode mode)
10197 enum mode_class mclass = GET_MODE_CLASS (mode);
10198 enum machine_mode innermode;
10200 /* Doh! What's going on? */
10201 if (mclass != MODE_VECTOR_INT
10202 && mclass != MODE_VECTOR_FLOAT
10203 && mclass != MODE_VECTOR_FRACT
10204 && mclass != MODE_VECTOR_UFRACT
10205 && mclass != MODE_VECTOR_ACCUM
10206 && mclass != MODE_VECTOR_UACCUM)
10209 /* Hardware support. Woo hoo! */
10210 if (targetm.vector_mode_supported_p (mode))
10213 innermode = GET_MODE_INNER (mode);
10215 /* We should probably return 1 if requesting V4DI and we have no DI
10216 but do have V2DI; however, that case is probably very unlikely. */
10218 /* If we have support for the inner mode, we can safely emulate it.
10219 We may not have V2DI, but we can emulate with a pair of DIs. */
10220 return targetm.scalar_mode_supported_p (innermode);
10223 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10225 const_vector_from_tree (tree exp)
10230 enum machine_mode inner, mode;
10232 mode = TYPE_MODE (TREE_TYPE (exp));
10234 if (initializer_zerop (exp))
10235 return CONST0_RTX (mode);
10237 units = GET_MODE_NUNITS (mode);
10238 inner = GET_MODE_INNER (mode);
10240 v = rtvec_alloc (units);
10242 link = TREE_VECTOR_CST_ELTS (exp);
10243 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10245 elt = TREE_VALUE (link);
10247 if (TREE_CODE (elt) == REAL_CST)
10248 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10250 else if (TREE_CODE (elt) == FIXED_CST)
10251 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10254 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10255 TREE_INT_CST_HIGH (elt),
10259 /* Initialize remaining elements to 0. */
10260 for (; i < units; ++i)
10261 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10263 return gen_rtx_CONST_VECTOR (mode, v);
10267 /* Build a decl for an EH personality function named NAME. */
10270 build_personality_function (const char *name)
10274 type = build_function_type_list (integer_type_node, integer_type_node,
10275 long_long_unsigned_type_node,
10276 ptr_type_node, ptr_type_node, NULL_TREE);
10277 decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10278 get_identifier (name), type);
10279 DECL_ARTIFICIAL (decl) = 1;
10280 DECL_EXTERNAL (decl) = 1;
10281 TREE_PUBLIC (decl) = 1;
10283 /* Zap the nonsensical SYMBOL_REF_DECL for this. What we're left with
10284 are the flags assigned by targetm.encode_section_info. */
10285 SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10290 /* Extracts the personality function of DECL and returns the corresponding
10294 get_personality_function (tree decl)
10296 tree personality = DECL_FUNCTION_PERSONALITY (decl);
10297 enum eh_personality_kind pk;
10299 pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10300 if (pk == eh_personality_none)
10304 && pk == eh_personality_any)
10305 personality = lang_hooks.eh_personality ();
10307 if (pk == eh_personality_lang)
10308 gcc_assert (personality != NULL_TREE);
10310 return XEXP (DECL_RTL (personality), 0);
10313 #include "gt-expr.h"