/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
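/* Illustrative sketch (not from the original source): on a hypothetical
   target where MOVE_RATIO is 4 and MOVE_MAX_PIECES is 4, a word-aligned
   copy costs one SImode move per 4 bytes, so

     MOVE_BY_PIECES_P (16, 32)  =>  4 < 4  =>  false (use movmem/libcall)
     MOVE_BY_PIECES_P (8, 32)   =>  2 < 4  =>  true  (expand inline)

   The actual numbers depend on the target's ratios and mov optabs, so
   treat these values as an example only.  */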
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
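/* Illustrative sketch (not from the original source): on a target with an
   extending float load, such as x86's load of an SFmode memory operand
   into an extended-precision register, this pass would record

     float_extend_from_mem[XFmode][SFmode] == true

   letting later expansion load and extend in a single memory reference
   instead of a plain load followed by a register extension.  The mode
   names are an example only; the entries depend on the target's extend
   patterns.  */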
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));


  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_handler (tab, to_mode, from_mode)->libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
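/* Illustrative sketch (not from the original source): converting a DFmode
   value to SFmode on a target with a truncdfsf2 pattern emits one
   FLOAT_TRUNCATE insn via the direct path above; without the pattern the
   code falls back to a libgcc call such as __truncdfsf2, wrapped in a
   libcall block so later passes can treat it as a single conversion.  */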
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
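/* Illustrative usage sketch (not from the original source): widening a
   pseudo from QImode to SImode with zero extension.

     rtx narrow = gen_reg_rtx (QImode);
     rtx wide = gen_reg_rtx (SImode);
     convert_move (wide, narrow, 1);	// 1 = treat NARROW as unsigned

   On most targets this hits the direct-extension path and emits a single
   zero_extendqisi2 insn; the other branches above cover targets that need
   an intermediate mode, a shift pair, or a multi-word expansion.  */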
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
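/* Worked example (not from the original source): converting the QImode
   constant -1 to HImode.

     rtx r = convert_modes (HImode, QImode, GEN_INT (-1), 1);

   The constant path above folds at compile time: with unsignedp == 1 the
   value is zero-extended to the CONST_INT 0xff and no insns are emitted;
   with unsignedp == 0 the sign bit is propagated and R stays -1.  */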
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
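/* Illustrative usage sketch (not from the original source): expanding an
   8-byte copy between two word-aligned BLKmode MEMs.

     if (can_move_by_pieces (8, 32))
       move_by_pieces (dst_mem, src_mem, 8, 32, 0);

   On a 32-bit target this typically expands to two SImode moves, and with
   ENDP == 0 the call returns DST_MEM itself.  DST_MEM and SRC_MEM are
   assumed to be BLKmode MEM rtx's built elsewhere.  */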
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }
  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
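/* Worked example (not from the original source): l = 7 bytes with 32-bit
   alignment and max_size = 5 on a 32-bit target.  The loop first picks
   SImode (4 bytes): 7 / 4 = 1 insn, remainder 3; then HImode: 3 / 2 = 1,
   remainder 1; then QImode: 1 / 1 = 1, remainder 0.  Total: 3 insns,
   assuming each mode's mov optab exists and alignment suffices.  */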
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);
  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
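/* Illustrative usage sketch (not from the original source): copying a
   16-byte aggregate during expansion.

     rtx dst = assign_stack_temp (BLKmode, 16, 0);
     emit_block_move (dst, src, GEN_INT (16), BLOCK_OP_NORMAL);

   Depending on the size, the alignment, and the target's movmem patterns,
   this becomes inline piecewise moves, a movmem insn, or a call to
   memcpy; SRC is assumed to be a BLKmode MEM built elsewhere.  */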
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  if (OUTGOING_REG_PARM_STACK_SPACE)
    {
      tree fn;
      fn = emit_block_move_libcall_fn (false);
      if (REG_PARM_STACK_SPACE (fn) != 0)
	return false;
    }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
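/* The emitted control flow corresponds to this C sketch (illustrative
   only, not from the original source):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];	// one QImode move per iteration
     iter++;
   cmp:
     if (iter < size) goto top;
 */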
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
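/* Illustrative sketch (not from the original source): a group rtx for a
   value passed in two registers might look like

     (parallel [(expr_list (reg:DI 4) (const_int 0))
		(expr_list (reg:DI 5) (const_int 8))])

   gen_group_rtx clones that shape with fresh pseudos in place of the hard
   registers, keeping the same modes and byte offsets.  */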
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }
  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}
      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }
  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = alloca (sizeof (rtx) * finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
	{
	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
	  emit_move_insn (tmps[i], reg);
	}
      else
	tmps[i] = reg;
    }
  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
	dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
	 of the destination mode, use a paradoxical subreg to
	 initialize the destination.  */
      if (start < finish)
	{
	  inner = GET_MODE (tmps[start]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[start],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  start++;
		}
	    }
	}

      /* If the first element wasn't the low part, try the last.  */
      if (!done
	  && start < finish - 1)
	{
	  inner = GET_MODE (tmps[finish - 1]);
	  bytepos = subreg_lowpart_offset (inner, outer);
	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
	    {
	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
					  inner, 0);
	      if (temp)
		{
		  emit_move_insn (dst, temp);
		  done = true;
		  finish--;
		}
	    }
	}

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
	emit_move_insn (dst, CONST0_RTX (outer));
    }
  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
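/* Illustrative sketch (not from the original source): for an aggregate
   passed or returned in two registers, expansion pairs these routines
   roughly as

     emit_group_load (parallel, src_mem, type, 16);	// regs <- memory
     ...
     emit_group_store (dst_mem, parallel, type, 16);	// memory <- regs

   with PARALLEL describing the ABI's register/offset layout, as in the
   gen_group_rtx sketch above.  */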
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2100 /* If the structure doesn't take up a whole number of words, see whether
2101 SRCREG is padded on the left or on the right. If it's on the left,
2102 set PADDING_CORRECTION to the number of bits to skip.
2104 In most ABIs, the structure will be returned at the least significant end of
2105 the register, which translates to right padding on little-endian
2106 targets and left padding on big-endian targets. The opposite
2107 holds if the structure is returned at the most significant
2108 end of the register. */
2109 if (bytes % UNITS_PER_WORD != 0
2110 && (targetm.calls.return_in_msb (type)
2112 : BYTES_BIG_ENDIAN))
2114 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2116 /* Copy the structure BITSIZE bits at a time.
2118 We could probably emit more efficient code for machines which do not use
2119 strict alignment, but it doesn't seem worth the effort at the current time. */
2121 for (bitpos = 0, xbitpos = padding_correction;
2122 bitpos < bytes * BITS_PER_UNIT;
2123 bitpos += bitsize, xbitpos += bitsize)
2125 /* We need a new source operand each time xbitpos is on a
2126 word boundary and when xbitpos == padding_correction
2127 (the first time through). */
2128 if (xbitpos % BITS_PER_WORD == 0
2129 || xbitpos == padding_correction)
2130 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2133 /* We need a new destination operand each time bitpos is on a word boundary. */
2135 if (bitpos % BITS_PER_WORD == 0)
2136 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2138 /* Use xbitpos for the source extraction (right justified) and
2139 bitpos for the destination store (left justified). */
2140 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2141 extract_bit_field (src, bitsize,
2142 xbitpos % BITS_PER_WORD, 1,
2143 NULL_RTX, word_mode, word_mode));
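/* A hedged usage sketch, not from the original source; the register number
   and the helper name are hypothetical.  It pulls a BLKmode return value
   out of the first hard return register, letting copy_blkmode_from_reg
   create the stack temporary.  */
#if 0
static rtx
example_copy_return_value (tree type)
{
  rtx ret_reg = gen_rtx_REG (word_mode, 0);   /* hypothetical return reg */
  return copy_blkmode_from_reg (NULL_RTX, ret_reg, type);
}
#endif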
2149 /* Add a USE expression for REG to the (possibly empty) list pointed
2150 to by CALL_FUSAGE. REG must denote a hard register. */
2153 use_reg (rtx *call_fusage, rtx reg)
2155 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2158 = gen_rtx_EXPR_LIST (VOIDmode,
2159 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2162 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2163 starting at REGNO. All of these registers must be hard registers. */
2166 use_regs (rtx *call_fusage, int regno, int nregs)
2170 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2172 for (i = 0; i < nregs; i++)
2173 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2176 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2177 PARALLEL REGS. This is for calls that pass values in multiple
2178 non-contiguous locations. The Irix 6 ABI has examples of this. */
2181 use_group_regs (rtx *call_fusage, rtx regs)
2185 for (i = 0; i < XVECLEN (regs, 0); i++)
2187 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2189 /* A NULL entry means the parameter goes both on the stack and in
2190 registers. This can also be a MEM for targets that pass values
2191 partially on the stack and partially in registers. */
2192 if (reg != 0 && REG_P (reg))
2193 use_reg (call_fusage, reg);
2198 /* Determine whether the LEN bytes generated by CONSTFUN can be
2199 stored to memory using several move instructions. CONSTFUNDATA is
2200 a pointer which will be passed as argument in every CONSTFUN call.
2201 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2202 a memset operation and false if it's a copy of a constant string.
2203 Return nonzero if a call to store_by_pieces should succeed. */
2206 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2207 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2208 void *constfundata, unsigned int align, bool memsetp)
2210 unsigned HOST_WIDE_INT l;
2211 unsigned int max_size;
2212 HOST_WIDE_INT offset = 0;
2213 enum machine_mode mode, tmode;
2214 enum insn_code icode;
2222 ? SET_BY_PIECES_P (len, align)
2223 : STORE_BY_PIECES_P (len, align)))
2226 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2227 if (align >= GET_MODE_ALIGNMENT (tmode))
2228 align = GET_MODE_ALIGNMENT (tmode);
2231 enum machine_mode xmode;
2233 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2235 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2236 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2237 || SLOW_UNALIGNED_ACCESS (tmode, align))
2240 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2243 /* We would first store what we can in the largest integer mode, then go to
2244 successively smaller modes. */
2247 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2252 max_size = STORE_MAX_PIECES + 1;
2253 while (max_size > 1)
2255 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2256 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2257 if (GET_MODE_SIZE (tmode) < max_size)
2260 if (mode == VOIDmode)
2263 icode = optab_handler (mov_optab, mode)->insn_code;
2264 if (icode != CODE_FOR_nothing
2265 && align >= GET_MODE_ALIGNMENT (mode))
2267 unsigned int size = GET_MODE_SIZE (mode);
2274 cst = (*constfun) (constfundata, offset, mode);
2275 if (!LEGITIMATE_CONSTANT_P (cst))
2285 max_size = GET_MODE_SIZE (mode);
2288 /* The code above should have handled everything. */
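/* A hedged sketch, not from the original source, of the usual pairing of
   can_store_by_pieces with store_by_pieces.  The constfun mirrors
   clear_by_pieces_1 below; the helper names are hypothetical.  */
#if 0
/* Constant generator: every piece is zero.  */
static rtx
zero_constfun (void *data ATTRIBUTE_UNUSED, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
               enum machine_mode mode)
{
  return CONST0_RTX (mode);
}

/* Zero LEN bytes of the BLKmode MEM rtx BLK when profitable.  */
static void
example_zero_by_pieces (rtx blk, unsigned HOST_WIDE_INT len, unsigned int align)
{
  if (can_store_by_pieces (len, zero_constfun, NULL, align, true))
    store_by_pieces (blk, len, zero_constfun, NULL, align, true, 0);
}
#endif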
2295 /* Generate several move instructions to store LEN bytes generated by
2296 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2297 pointer which will be passed as argument in every CONSTFUN call.
2298 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2299 a memset operation and false if it's a copy of a constant string.
2300 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2301 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2305 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2306 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2307 void *constfundata, unsigned int align, bool memsetp, int endp)
2309 struct store_by_pieces data;
2313 gcc_assert (endp != 2);
2318 ? SET_BY_PIECES_P (len, align)
2319 : STORE_BY_PIECES_P (len, align));
2320 data.constfun = constfun;
2321 data.constfundata = constfundata;
2324 store_by_pieces_1 (&data, align);
2329 gcc_assert (!data.reverse);
2334 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2335 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2337 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2340 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2347 to1 = adjust_address (data.to, QImode, data.offset);
2355 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2356 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2359 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2361 struct store_by_pieces data;
2366 data.constfun = clear_by_pieces_1;
2367 data.constfundata = NULL;
2370 store_by_pieces_1 (&data, align);
2373 /* Callback routine for clear_by_pieces.
2374 Return const0_rtx unconditionally. */
2377 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2378 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2379 enum machine_mode mode ATTRIBUTE_UNUSED)
2384 /* Subroutine of clear_by_pieces and store_by_pieces.
2385 Generate several move instructions to store LEN bytes of block TO. (A MEM
2386 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2389 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2390 unsigned int align ATTRIBUTE_UNUSED)
2392 rtx to_addr = XEXP (data->to, 0);
2393 unsigned int max_size = STORE_MAX_PIECES + 1;
2394 enum machine_mode mode = VOIDmode, tmode;
2395 enum insn_code icode;
2398 data->to_addr = to_addr;
2400 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2401 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2403 data->explicit_inc_to = 0;
2405 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2407 data->offset = data->len;
2409 /* If storing requires more than two move insns,
2410 copy addresses to registers (to make displacements shorter)
2411 and use post-increment if available. */
2412 if (!data->autinc_to
2413 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2415 /* Determine the main mode we'll be using. */
2416 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2417 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2418 if (GET_MODE_SIZE (tmode) < max_size)
2421 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2423 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2424 data->autinc_to = 1;
2425 data->explicit_inc_to = -1;
2428 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2429 && ! data->autinc_to)
2431 data->to_addr = copy_addr_to_reg (to_addr);
2432 data->autinc_to = 1;
2433 data->explicit_inc_to = 1;
2436 if ( !data->autinc_to && CONSTANT_P (to_addr))
2437 data->to_addr = copy_addr_to_reg (to_addr);
2440 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2441 if (align >= GET_MODE_ALIGNMENT (tmode))
2442 align = GET_MODE_ALIGNMENT (tmode);
2445 enum machine_mode xmode;
2447 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2449 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2450 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2451 || SLOW_UNALIGNED_ACCESS (tmode, align))
2454 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2457 /* First store what we can in the largest integer mode, then go to
2458 successively smaller modes. */
2460 while (max_size > 1)
2462 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2463 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2464 if (GET_MODE_SIZE (tmode) < max_size)
2467 if (mode == VOIDmode)
2470 icode = optab_handler (mov_optab, mode)->insn_code;
2471 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2472 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2474 max_size = GET_MODE_SIZE (mode);
2477 /* The code above should have handled everything. */
2478 gcc_assert (!data->len);
2481 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2482 with move instructions for mode MODE. GENFUN is the gen_... function
2483 to make a move insn for that mode. DATA has all the other info. */
2486 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2487 struct store_by_pieces *data)
2489 unsigned int size = GET_MODE_SIZE (mode);
2492 while (data->len >= size)
2495 data->offset -= size;
2497 if (data->autinc_to)
2498 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2501 to1 = adjust_address (data->to, mode, data->offset);
2503 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2504 emit_insn (gen_add2_insn (data->to_addr,
2505 GEN_INT (-(HOST_WIDE_INT) size)));
2507 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2508 emit_insn ((*genfun) (to1, cst));
2510 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2511 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2513 if (! data->reverse)
2514 data->offset += size;
2520 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2521 its length in bytes. */
2524 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2525 unsigned int expected_align, HOST_WIDE_INT expected_size)
2527 enum machine_mode mode = GET_MODE (object);
2530 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2532 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2533 just move a zero. Otherwise, do this a piece at a time. */
2535 && GET_CODE (size) == CONST_INT
2536 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2538 rtx zero = CONST0_RTX (mode);
2541 emit_move_insn (object, zero);
2545 if (COMPLEX_MODE_P (mode))
2547 zero = CONST0_RTX (GET_MODE_INNER (mode));
2550 write_complex_part (object, zero, 0);
2551 write_complex_part (object, zero, 1);
2557 if (size == const0_rtx)
2560 align = MEM_ALIGN (object);
2562 if (GET_CODE (size) == CONST_INT
2563 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2564 clear_by_pieces (object, INTVAL (size), align);
2565 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2566 expected_align, expected_size))
2569 return set_storage_via_libcall (object, size, const0_rtx,
2570 method == BLOCK_OP_TAILCALL);
2576 clear_storage (rtx object, rtx size, enum block_op_methods method)
2578 return clear_storage_hints (object, size, method, 0, -1);
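/* A hedged usage sketch, not from the original source; the 32-byte size
   and the helper name are hypothetical.  It zeros a BLKmode stack
   temporary through the wrapper above.  */
#if 0
static rtx
example_clear_storage (void)
{
  rtx obj = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (obj, GEN_INT (32), BLOCK_OP_NORMAL);
  return obj;
}
#endif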
2582 /* A subroutine of clear_storage. Expand a call to memset.
2583 Return the return value of memset, 0 otherwise. */
2586 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2588 tree call_expr, fn, object_tree, size_tree, val_tree;
2589 enum machine_mode size_mode;
2592 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2593 place those new pseudos into a VAR_DECL and use them later. */
2595 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2597 size_mode = TYPE_MODE (sizetype);
2598 size = convert_to_mode (size_mode, size, 1);
2599 size = copy_to_mode_reg (size_mode, size);
2601 /* It is incorrect to use the libcall calling conventions to call
2602 memset in this context. This could be a user call to memset and
2603 the user may wish to examine the return value from memset. For
2604 targets where libcalls and normal calls have different conventions
2605 for returning pointers, we could end up generating incorrect code. */
2607 object_tree = make_tree (ptr_type_node, object);
2608 if (GET_CODE (val) != CONST_INT)
2609 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2610 size_tree = make_tree (sizetype, size);
2611 val_tree = make_tree (integer_type_node, val);
2613 fn = clear_storage_libcall_fn (true);
2614 call_expr = build_call_expr (fn, 3,
2615 object_tree, integer_zero_node, size_tree);
2616 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2618 retval = expand_normal (call_expr);
2623 /* A subroutine of set_storage_via_libcall. Create the tree node
2624 for the function we use for block clears. The first time FOR_CALL
2625 is true, we call assemble_external. */
2627 static GTY(()) tree block_clear_fn;
2630 init_block_clear_fn (const char *asmspec)
2632 if (!block_clear_fn)
2636 fn = get_identifier ("memset");
2637 args = build_function_type_list (ptr_type_node, ptr_type_node,
2638 integer_type_node, sizetype,
2641 fn = build_decl (FUNCTION_DECL, fn, args);
2642 DECL_EXTERNAL (fn) = 1;
2643 TREE_PUBLIC (fn) = 1;
2644 DECL_ARTIFICIAL (fn) = 1;
2645 TREE_NOTHROW (fn) = 1;
2646 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2647 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2649 block_clear_fn = fn;
2653 set_user_assembler_name (block_clear_fn, asmspec);
2657 clear_storage_libcall_fn (int for_call)
2659 static bool emitted_extern;
2661 if (!block_clear_fn)
2662 init_block_clear_fn (NULL);
2664 if (for_call && !emitted_extern)
2666 emitted_extern = true;
2667 make_decl_rtl (block_clear_fn);
2668 assemble_external (block_clear_fn);
2671 return block_clear_fn;
2674 /* Expand a setmem pattern; return true if successful. */
2677 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2678 unsigned int expected_align, HOST_WIDE_INT expected_size)
2680 /* Try the most limited insn first, because there's no point
2681 including more than one in the machine description unless
2682 the more limited one has some advantage. */
2684 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2685 enum machine_mode mode;
2687 if (expected_align < align)
2688 expected_align = align;
2690 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2691 mode = GET_MODE_WIDER_MODE (mode))
2693 enum insn_code code = setmem_optab[(int) mode];
2694 insn_operand_predicate_fn pred;
2696 if (code != CODE_FOR_nothing
2697 /* We don't need MODE to be narrower than
2698 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2699 the mode mask, as it is returned by the macro, it will
2700 definitely be less than the actual mode mask. */
2701 && ((GET_CODE (size) == CONST_INT
2702 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2703 <= (GET_MODE_MASK (mode) >> 1)))
2704 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2705 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2706 || (*pred) (object, BLKmode))
2707 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2708 || (*pred) (opalign, VOIDmode)))
2711 enum machine_mode char_mode;
2712 rtx last = get_last_insn ();
2715 opsize = convert_to_mode (mode, size, 1);
2716 pred = insn_data[(int) code].operand[1].predicate;
2717 if (pred != 0 && ! (*pred) (opsize, mode))
2718 opsize = copy_to_mode_reg (mode, opsize);
2721 char_mode = insn_data[(int) code].operand[2].mode;
2722 if (char_mode != VOIDmode)
2724 opchar = convert_to_mode (char_mode, opchar, 1);
2725 pred = insn_data[(int) code].operand[2].predicate;
2726 if (pred != 0 && ! (*pred) (opchar, char_mode))
2727 opchar = copy_to_mode_reg (char_mode, opchar);
2730 if (insn_data[(int) code].n_operands == 4)
2731 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2733 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2734 GEN_INT (expected_align),
2735 GEN_INT (expected_size));
2742 delete_insns_since (last);
2750 /* Write to one of the components of the complex value CPLX. Write VAL to
2751 the real part if IMAG_P is false, and the imaginary part if it's true. */
2754 write_complex_part (rtx cplx, rtx val, bool imag_p)
2756 enum machine_mode cmode;
2757 enum machine_mode imode;
2760 if (GET_CODE (cplx) == CONCAT)
2762 emit_move_insn (XEXP (cplx, imag_p), val);
2766 cmode = GET_MODE (cplx);
2767 imode = GET_MODE_INNER (cmode);
2768 ibitsize = GET_MODE_BITSIZE (imode);
2770 /* For MEMs simplify_gen_subreg may generate an invalid new address
2771 because, e.g., the original address is considered mode-dependent
2772 by the target, which restricts simplify_subreg from invoking
2773 adjust_address_nv. Instead of preparing fallback support for an
2774 invalid address, we call adjust_address_nv directly. */
2777 emit_move_insn (adjust_address_nv (cplx, imode,
2778 imag_p ? GET_MODE_SIZE (imode) : 0),
2783 /* If the sub-object is at least word sized, then we know that subregging
2784 will work. This special case is important, since store_bit_field
2785 wants to operate on integer modes, and there's rarely an OImode to
2786 correspond to TCmode. */
2787 if (ibitsize >= BITS_PER_WORD
2788 /* For hard regs we have exact predicates. Assume we can split
2789 the original object if it spans an even number of hard regs.
2790 This special case is important for SCmode on 64-bit platforms
2791 where the natural size of floating-point regs is 32-bit. */
2793 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2794 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2796 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2797 imag_p ? GET_MODE_SIZE (imode) : 0);
2800 emit_move_insn (part, val);
2804 /* simplify_gen_subreg may fail for sub-word MEMs. */
2805 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2808 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2811 /* Extract one of the components of the complex value CPLX. Extract the
2812 real part if IMAG_P is false, and the imaginary part if it's true. */
2815 read_complex_part (rtx cplx, bool imag_p)
2817 enum machine_mode cmode, imode;
2820 if (GET_CODE (cplx) == CONCAT)
2821 return XEXP (cplx, imag_p);
2823 cmode = GET_MODE (cplx);
2824 imode = GET_MODE_INNER (cmode);
2825 ibitsize = GET_MODE_BITSIZE (imode);
2827 /* Special case reads from complex constants that got spilled to memory. */
2828 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2830 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2831 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2833 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2834 if (CONSTANT_CLASS_P (part))
2835 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2839 /* For MEMs simplify_gen_subreg may generate an invalid new address
2840 because, e.g., the original address is considered mode-dependent
2841 by the target, which restricts simplify_subreg from invoking
2842 adjust_address_nv. Instead of preparing fallback support for an
2843 invalid address, we call adjust_address_nv directly. */
2845 return adjust_address_nv (cplx, imode,
2846 imag_p ? GET_MODE_SIZE (imode) : 0);
2848 /* If the sub-object is at least word sized, then we know that subregging
2849 will work. This special case is important, since extract_bit_field
2850 wants to operate on integer modes, and there's rarely an OImode to
2851 correspond to TCmode. */
2852 if (ibitsize >= BITS_PER_WORD
2853 /* For hard regs we have exact predicates. Assume we can split
2854 the original object if it spans an even number of hard regs.
2855 This special case is important for SCmode on 64-bit platforms
2856 where the natural size of floating-point regs is 32-bit. */
2858 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2859 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2861 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2862 imag_p ? GET_MODE_SIZE (imode) : 0);
2866 /* simplify_gen_subreg may fail for sub-word MEMs. */
2867 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2870 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2871 true, NULL_RTX, imode, imode);
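/* A hedged sketch, not from the original source; the SCmode assumption and
   the helper name are hypothetical.  read_complex_part and
   write_complex_part are the usual way to move complex values piecewise.  */
#if 0
/* Fetch the real half of C and zero its imaginary half; C is assumed
   to have SCmode.  */
static rtx
example_complex_parts (rtx c)
{
  rtx re = read_complex_part (c, false);              /* real half */
  write_complex_part (c, CONST0_RTX (SFmode), true);  /* imag half := 0.0 */
  return re;
}
#endif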
2874 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2875 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2876 represented in NEW_MODE. If FORCE is true, this will never happen, as
2877 we'll force-create a SUBREG if needed. */
2880 emit_move_change_mode (enum machine_mode new_mode,
2881 enum machine_mode old_mode, rtx x, bool force)
2885 if (push_operand (x, GET_MODE (x)))
2887 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2888 MEM_COPY_ATTRIBUTES (ret, x);
2892 /* We don't have to worry about changing the address since the
2893 size in bytes is supposed to be the same. */
2894 if (reload_in_progress)
2896 /* Copy the MEM to change the mode and move any
2897 substitutions from the old MEM to the new one. */
2898 ret = adjust_address_nv (x, new_mode, 0);
2899 copy_replacements (x, ret);
2902 ret = adjust_address (x, new_mode, 0);
2906 /* Note that we do want simplify_subreg's behavior of validating
2907 that the new mode is ok for a hard register. If we were to use
2908 simplify_gen_subreg, we would create the subreg, but would
2909 probably run into the target not being able to implement it. */
2910 /* Except, of course, when FORCE is true, in which case this is exactly
2911 what we want; that is needed for CCmodes on some targets. */
2913 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2915 ret = simplify_subreg (new_mode, x, old_mode, 0);
2921 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2922 an integer mode of the same size as MODE. Returns the instruction
2923 emitted, or NULL if such a move could not be generated. */
2926 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2928 enum machine_mode imode;
2929 enum insn_code code;
2931 /* There must exist a mode of the exact size we require. */
2932 imode = int_mode_for_mode (mode);
2933 if (imode == BLKmode)
2936 /* The target must support moves in this mode. */
2937 code = optab_handler (mov_optab, imode)->insn_code;
2938 if (code == CODE_FOR_nothing)
2941 x = emit_move_change_mode (imode, mode, x, force);
2944 y = emit_move_change_mode (imode, mode, y, force);
2947 return emit_insn (GEN_FCN (code) (x, y));
2950 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2951 Return an equivalent MEM that does not use an auto-increment. */
2954 emit_move_resolve_push (enum machine_mode mode, rtx x)
2956 enum rtx_code code = GET_CODE (XEXP (x, 0));
2957 HOST_WIDE_INT adjust;
2960 adjust = GET_MODE_SIZE (mode);
2961 #ifdef PUSH_ROUNDING
2962 adjust = PUSH_ROUNDING (adjust);
2964 if (code == PRE_DEC || code == POST_DEC)
2966 else if (code == PRE_MODIFY || code == POST_MODIFY)
2968 rtx expr = XEXP (XEXP (x, 0), 1);
2971 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2972 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2973 val = INTVAL (XEXP (expr, 1));
2974 if (GET_CODE (expr) == MINUS)
2976 gcc_assert (adjust == val || adjust == -val);
2980 /* Do not use anti_adjust_stack, since we don't want to update
2981 stack_pointer_delta. */
2982 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2983 GEN_INT (adjust), stack_pointer_rtx,
2984 0, OPTAB_LIB_WIDEN);
2985 if (temp != stack_pointer_rtx)
2986 emit_move_insn (stack_pointer_rtx, temp);
2993 temp = stack_pointer_rtx;
2998 temp = plus_constant (stack_pointer_rtx, -adjust);
3004 return replace_equiv_address (x, temp);
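/* Illustrative example (hypothetical 4-byte mode, stack growing downward):
   a push destination such as (mem:SI (pre_dec:SI (reg sp))) is resolved
   above by emitting an explicit "sp = sp - 4" and returning
   (mem:SI (reg sp)), i.e. the same slot with the auto-increment
   stripped.  */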
3007 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3008 X is known to satisfy push_operand, and MODE is known to be complex.
3009 Returns the last instruction emitted. */
3012 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3014 enum machine_mode submode = GET_MODE_INNER (mode);
3017 #ifdef PUSH_ROUNDING
3018 unsigned int submodesize = GET_MODE_SIZE (submode);
3020 /* If we output to the stack, but the size is smaller than what the
3021 machine can push exactly, we need to use move instructions. */
3022 if (PUSH_ROUNDING (submodesize) != submodesize)
3024 x = emit_move_resolve_push (mode, x);
3025 return emit_move_insn (x, y);
3029 /* Note that the real part always precedes the imag part in memory
3030 regardless of the machine's endianness. */
3031 switch (GET_CODE (XEXP (x, 0)))
3045 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3046 read_complex_part (y, imag_first));
3047 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3048 read_complex_part (y, !imag_first));
3051 /* A subroutine of emit_move_complex. Perform the move from Y to X
3052 via two moves of the parts. Returns the last instruction emitted. */
3055 emit_move_complex_parts (rtx x, rtx y)
3057 /* Show the output dies here. This is necessary for SUBREGs
3058 of pseudos since we cannot track their lifetimes correctly;
3059 hard regs shouldn't appear here except as return values. */
3060 if (!reload_completed && !reload_in_progress
3061 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3062 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3064 write_complex_part (x, read_complex_part (y, false), false);
3065 write_complex_part (x, read_complex_part (y, true), true);
3067 return get_last_insn ();
3070 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3071 MODE is known to be complex. Returns the last instruction emitted. */
3074 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3078 /* Need to take special care for pushes, to maintain proper ordering
3079 of the data, and possibly extra padding. */
3080 if (push_operand (x, mode))
3081 return emit_move_complex_push (mode, x, y);
3083 /* See if we can coerce the target into moving both values at once. */
3085 /* Move floating point as parts. */
3086 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3087 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3089 /* Not possible if the values are inherently not adjacent. */
3090 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3092 /* Is possible if both are registers (or subregs of registers). */
3093 else if (register_operand (x, mode) && register_operand (y, mode))
3095 /* If one of the operands is a memory, and alignment constraints
3096 are friendly enough, we may be able to do combined memory operations.
3097 We do not attempt this if Y is a constant because that combination is
3098 usually better with the by-parts thing below. */
3099 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3100 && (!STRICT_ALIGNMENT
3101 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3110 /* For memory to memory moves, optimal behavior can be had with the
3111 existing block move logic. */
3112 if (MEM_P (x) && MEM_P (y))
3114 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3115 BLOCK_OP_NO_LIBCALL);
3116 return get_last_insn ();
3119 ret = emit_move_via_integer (mode, x, y, true);
3124 return emit_move_complex_parts (x, y);
3127 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3128 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3131 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3135 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3138 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3139 if (code != CODE_FOR_nothing)
3141 x = emit_move_change_mode (CCmode, mode, x, true);
3142 y = emit_move_change_mode (CCmode, mode, y, true);
3143 return emit_insn (GEN_FCN (code) (x, y));
3147 /* Otherwise, find the MODE_INT mode of the same width. */
3148 ret = emit_move_via_integer (mode, x, y, false);
3149 gcc_assert (ret != NULL);
3153 /* Return true if word I of OP lies entirely in the
3154 undefined bits of a paradoxical subreg. */
3157 undefined_operand_subword_p (const_rtx op, int i)
3159 enum machine_mode innermode, innermostmode;
3161 if (GET_CODE (op) != SUBREG)
3163 innermode = GET_MODE (op);
3164 innermostmode = GET_MODE (SUBREG_REG (op));
3165 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3166 /* The SUBREG_BYTE represents the offset, as if the value were stored in
3167 memory, except for a paradoxical subreg where we define
3168 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg. */
3170 if (SUBREG_BYTE (op) == 0
3171 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3173 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3174 if (WORDS_BIG_ENDIAN)
3175 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3176 if (BYTES_BIG_ENDIAN)
3177 offset += difference % UNITS_PER_WORD;
3179 if (offset >= GET_MODE_SIZE (innermostmode)
3180 || offset <= -GET_MODE_SIZE (word_mode))
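/* Illustrative example (hypothetical 32-bit target): in
   (subreg:DI (reg:SI 100) 0) the subreg is paradoxical, and word 1 of the
   DImode value lies wholly outside the SImode source, so this predicate
   returns true for I == 1 and the multi-word move emitter below can skip
   that word entirely.  */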
3185 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3186 MODE is any multi-word or full-word mode that lacks a move_insn
3187 pattern. Note that you will get better code if you define such
3188 patterns, even if they must turn into multiple assembler instructions. */
3191 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3198 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3200 /* If X is a push on the stack, do the push now and replace
3201 X with a reference to the stack pointer. */
3202 if (push_operand (x, mode))
3203 x = emit_move_resolve_push (mode, x);
3205 /* If we are in reload, see if either operand is a MEM whose address
3206 is scheduled for replacement. */
3207 if (reload_in_progress && MEM_P (x)
3208 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3209 x = replace_equiv_address_nv (x, inner);
3210 if (reload_in_progress && MEM_P (y)
3211 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3212 y = replace_equiv_address_nv (y, inner);
3216 need_clobber = false;
3218 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3221 rtx xpart = operand_subword (x, i, 1, mode);
3224 /* Do not generate code for a move if it would come entirely
3225 from the undefined bits of a paradoxical subreg. */
3226 if (undefined_operand_subword_p (y, i))
3229 ypart = operand_subword (y, i, 1, mode);
3231 /* If we can't get a part of Y, put Y into memory if it is a
3232 constant. Otherwise, force it into a register. Then we must
3233 be able to get a part of Y. */
3234 if (ypart == 0 && CONSTANT_P (y))
3236 y = use_anchored_address (force_const_mem (mode, y));
3237 ypart = operand_subword (y, i, 1, mode);
3239 else if (ypart == 0)
3240 ypart = operand_subword_force (y, i, mode);
3242 gcc_assert (xpart && ypart);
3244 need_clobber |= (GET_CODE (xpart) == SUBREG);
3246 last_insn = emit_move_insn (xpart, ypart);
3252 /* Show the output dies here. This is necessary for SUBREGs
3253 of pseudos since we cannot track their lifetimes correctly;
3254 hard regs shouldn't appear here except as return values.
3255 We never want to emit such a clobber after reload. */
3257 && ! (reload_in_progress || reload_completed)
3258 && need_clobber != 0)
3259 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3266 /* Low level part of emit_move_insn.
3267 Called just like emit_move_insn, but assumes X and Y
3268 are basically valid. */
3271 emit_move_insn_1 (rtx x, rtx y)
3273 enum machine_mode mode = GET_MODE (x);
3274 enum insn_code code;
3276 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3278 code = optab_handler (mov_optab, mode)->insn_code;
3279 if (code != CODE_FOR_nothing)
3280 return emit_insn (GEN_FCN (code) (x, y));
3282 /* Expand complex moves by moving real part and imag part. */
3283 if (COMPLEX_MODE_P (mode))
3284 return emit_move_complex (mode, x, y);
3286 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3288 rtx result = emit_move_via_integer (mode, x, y, true);
3290 /* If we can't find an integer mode, use a multi-word move. */
3294 return emit_move_multi_word (mode, x, y);
3297 if (GET_MODE_CLASS (mode) == MODE_CC)
3298 return emit_move_ccmode (mode, x, y);
3300 /* Try using a move pattern for the corresponding integer mode. This is
3301 only safe when simplify_subreg can convert MODE constants into integer
3302 constants. At present, it can only do this reliably if the value
3303 fits within a HOST_WIDE_INT. */
3304 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3306 rtx ret = emit_move_via_integer (mode, x, y, false);
3311 return emit_move_multi_word (mode, x, y);
3314 /* Generate code to copy Y into X.
3315 Both Y and X must have the same mode, except that
3316 Y can be a constant with VOIDmode.
3317 This mode cannot be BLKmode; use emit_block_move for that.
3319 Return the last instruction emitted. */
3322 emit_move_insn (rtx x, rtx y)
3324 enum machine_mode mode = GET_MODE (x);
3325 rtx y_cst = NULL_RTX;
3328 gcc_assert (mode != BLKmode
3329 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3334 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3335 && (last_insn = compress_float_constant (x, y)))
3340 if (!LEGITIMATE_CONSTANT_P (y))
3342 y = force_const_mem (mode, y);
3344 /* If the target's cannot_force_const_mem prevented the spill,
3345 assume that the target's move expanders will also take care
3346 of the non-legitimate constant. */
3350 y = use_anchored_address (y);
3354 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3357 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3358 && ! push_operand (x, GET_MODE (x)))
3360 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3361 x = validize_mem (x);
3364 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3366 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3367 y = validize_mem (y);
3369 gcc_assert (mode != BLKmode);
3371 last_insn = emit_move_insn_1 (x, y);
3373 if (y_cst && REG_P (x)
3374 && (set = single_set (last_insn)) != NULL_RTX
3375 && SET_DEST (set) == x
3376 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3377 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
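/* A hedged usage sketch, not from the original source; the mode and helper
   name are hypothetical.  It shows the common pattern of loading a
   constant into a fresh pseudo; the CONST_INT is VOIDmode, which the
   assertion above explicitly allows.  */
#if 0
static rtx
example_load_constant (void)
{
  rtx tmp = gen_reg_rtx (SImode);
  emit_move_insn (tmp, GEN_INT (42));
  return tmp;
}
#endif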
3382 /* If Y is representable exactly in a narrower mode, and the target can
3383 perform the extension directly from constant or memory, then emit the
3384 move as an extension. */
3387 compress_float_constant (rtx x, rtx y)
3389 enum machine_mode dstmode = GET_MODE (x);
3390 enum machine_mode orig_srcmode = GET_MODE (y);
3391 enum machine_mode srcmode;
3393 int oldcost, newcost;
3395 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3397 if (LEGITIMATE_CONSTANT_P (y))
3398 oldcost = rtx_cost (y, SET);
3400 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3402 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3403 srcmode != orig_srcmode;
3404 srcmode = GET_MODE_WIDER_MODE (srcmode))
3407 rtx trunc_y, last_insn;
3409 /* Skip if the target can't extend this way. */
3410 ic = can_extend_p (dstmode, srcmode, 0);
3411 if (ic == CODE_FOR_nothing)
3414 /* Skip if the narrowed value isn't exact. */
3415 if (! exact_real_truncate (srcmode, &r))
3418 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3420 if (LEGITIMATE_CONSTANT_P (trunc_y))
3422 /* Skip if the target needs extra instructions to perform the extension. */
3424 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3426 /* This is valid, but may not be cheaper than the original. */
3427 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3428 if (oldcost < newcost)
3431 else if (float_extend_from_mem[dstmode][srcmode])
3433 trunc_y = force_const_mem (srcmode, trunc_y);
3434 /* This is valid, but may not be cheaper than the original. */
3435 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3436 if (oldcost < newcost)
3438 trunc_y = validize_mem (trunc_y);
3443 /* For CSE's benefit, force the compressed constant pool entry
3444 into a new pseudo. This constant may be used in different modes,
3445 and if not, combine will put things back together for us. */
3446 trunc_y = force_reg (srcmode, trunc_y);
3447 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3448 last_insn = get_last_insn ();
3451 set_unique_reg_note (last_insn, REG_EQUAL, y);
3459 /* Pushing data onto the stack. */
3461 /* Push a block of length SIZE (perhaps variable)
3462 and return an rtx to address the beginning of the block.
3463 The value may be virtual_outgoing_args_rtx.
3465 EXTRA is the number of bytes of padding to push in addition to SIZE.
3466 BELOW nonzero means this padding comes at low addresses;
3467 otherwise, the padding comes at high addresses. */
3470 push_block (rtx size, int extra, int below)
3474 size = convert_modes (Pmode, ptr_mode, size, 1);
3475 if (CONSTANT_P (size))
3476 anti_adjust_stack (plus_constant (size, extra));
3477 else if (REG_P (size) && extra == 0)
3478 anti_adjust_stack (size);
3481 temp = copy_to_mode_reg (Pmode, size);
3483 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3484 temp, 0, OPTAB_LIB_WIDEN);
3485 anti_adjust_stack (temp);
3488 #ifndef STACK_GROWS_DOWNWARD
3494 temp = virtual_outgoing_args_rtx;
3495 if (extra != 0 && below)
3496 temp = plus_constant (temp, extra);
3500 if (GET_CODE (size) == CONST_INT)
3501 temp = plus_constant (virtual_outgoing_args_rtx,
3502 -INTVAL (size) - (below ? 0 : extra));
3503 else if (extra != 0 && !below)
3504 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3505 negate_rtx (Pmode, plus_constant (size, extra)));
3507 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3508 negate_rtx (Pmode, size));
3511 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
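/* A hedged usage sketch, not from the original source; the 16-byte size
   and helper name are hypothetical.  It reserves a block on the stack and
   addresses it as a BLKmode MEM.  */
#if 0
static rtx
example_push_block (void)
{
  rtx addr = push_block (GEN_INT (16), 0, 0);   /* no extra padding */
  return gen_rtx_MEM (BLKmode, addr);
}
#endif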
3514 #ifdef PUSH_ROUNDING
3516 /* Emit single push insn. */
3519 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3522 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3524 enum insn_code icode;
3525 insn_operand_predicate_fn pred;
3527 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3528 /* If there is a push pattern, use it.  Otherwise try the old way of
3529 throwing a MEM representing the push operation to the move expander. */
3530 icode = optab_handler (push_optab, mode)->insn_code;
3531 if (icode != CODE_FOR_nothing)
3533 if (((pred = insn_data[(int) icode].operand[0].predicate)
3534 && !((*pred) (x, mode))))
3535 x = force_reg (mode, x);
3536 emit_insn (GEN_FCN (icode) (x));
3539 if (GET_MODE_SIZE (mode) == rounded_size)
3540 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3541 /* If we are to pad downward, adjust the stack pointer first and
3542 then store X into the stack location using an offset. This is
3543 because emit_move_insn does not know how to pad; it does not have access to type. */
3545 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3547 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3548 HOST_WIDE_INT offset;
3550 emit_move_insn (stack_pointer_rtx,
3551 expand_binop (Pmode,
3552 #ifdef STACK_GROWS_DOWNWARD
3558 GEN_INT (rounded_size),
3559 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3561 offset = (HOST_WIDE_INT) padding_size;
3562 #ifdef STACK_GROWS_DOWNWARD
3563 if (STACK_PUSH_CODE == POST_DEC)
3564 /* We have already decremented the stack pointer, so get the previous value. */
3566 offset += (HOST_WIDE_INT) rounded_size;
3568 if (STACK_PUSH_CODE == POST_INC)
3569 /* We have already incremented the stack pointer, so get the previous value. */
3571 offset -= (HOST_WIDE_INT) rounded_size;
3573 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3577 #ifdef STACK_GROWS_DOWNWARD
3578 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3579 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3580 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3582 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3583 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3584 GEN_INT (rounded_size));
3586 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3589 dest = gen_rtx_MEM (mode, dest_addr);
3593 set_mem_attributes (dest, type, 1);
3595 if (flag_optimize_sibling_calls)
3596 /* Function incoming arguments may overlap with sibling call
3597 outgoing arguments and we cannot allow reordering of reads
3598 from function arguments with stores to outgoing arguments
3599 of sibling calls. */
3600 set_mem_alias_set (dest, 0);
3602 emit_move_insn (dest, x);
3606 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3608 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3610 SIZE is an rtx for the size of data to be copied (in bytes),
3611 needed only if X is BLKmode.
3613 ALIGN (in bits) is maximum alignment we can assume.
3615 If PARTIAL and REG are both nonzero, then copy that many of the first
3616 bytes of X into registers starting with REG, and push the rest of X.
3617 The amount of space pushed is decreased by PARTIAL bytes.
3618 REG must be a hard register in this case.
3619 If REG is zero but PARTIAL is not, take all other actions for an
3620 argument partially in registers, but do not actually load any registers.
3623 EXTRA is the amount in bytes of extra space to leave next to this arg.
3624 This is ignored if an argument block has already been allocated.
3626 On a machine that lacks real push insns, ARGS_ADDR is the address of
3627 the bottom of the argument block for this call. We use indexing off there
3628 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3629 argument block has not been preallocated.
3631 ARGS_SO_FAR is the size of args previously pushed for this call.
3633 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3634 for arguments passed in registers. If nonzero, it will be the number
3635 of bytes required. */
3638 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3639 unsigned int align, int partial, rtx reg, int extra,
3640 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3644 enum direction stack_direction
3645 #ifdef STACK_GROWS_DOWNWARD
3651 /* Decide where to pad the argument: `downward' for below,
3652 `upward' for above, or `none' for don't pad it.
3653 Default is below for small data on big-endian machines; else above. */
3654 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3656 /* Invert direction if stack is post-decrement.
3658 if (STACK_PUSH_CODE == POST_DEC)
3659 if (where_pad != none)
3660 where_pad = (where_pad == downward ? upward : downward);
3665 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3667 /* Copy a block into the stack, entirely or partially. */
3674 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3675 used = partial - offset;
3677 if (mode != BLKmode)
3679 /* A value is to be stored in an insufficiently aligned
3680 stack slot; copy via a suitably aligned slot if necessary. */
3682 size = GEN_INT (GET_MODE_SIZE (mode));
3683 if (!MEM_P (xinner))
3685 temp = assign_temp (type, 0, 1, 1);
3686 emit_move_insn (temp, xinner);
3693 /* USED is now the # of bytes we need not copy to the stack
3694 because registers will take care of them. */
3697 xinner = adjust_address (xinner, BLKmode, used);
3699 /* If the partial register-part of the arg counts in its stack size,
3700 skip the part of stack space corresponding to the registers.
3701 Otherwise, start copying to the beginning of the stack space,
3702 by setting SKIP to 0. */
3703 skip = (reg_parm_stack_space == 0) ? 0 : used;
3705 #ifdef PUSH_ROUNDING
3706 /* Do it with several push insns if that doesn't take lots of insns
3707 and if there is no difficulty with push insns that skip bytes
3708 on the stack for alignment purposes. */
3711 && GET_CODE (size) == CONST_INT
3713 && MEM_ALIGN (xinner) >= align
3714 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3715 /* Here we avoid the case of a structure whose weak alignment
3716 forces many pushes of a small amount of data,
3717 and such small pushes do rounding that causes trouble. */
3718 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3719 || align >= BIGGEST_ALIGNMENT
3720 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3721 == (align / BITS_PER_UNIT)))
3722 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3724 /* Push padding now if padding above and stack grows down,
3725 or if padding below and stack grows up.
3726 But if space already allocated, this has already been done. */
3727 if (extra && args_addr == 0
3728 && where_pad != none && where_pad != stack_direction)
3729 anti_adjust_stack (GEN_INT (extra));
3731 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3734 #endif /* PUSH_ROUNDING */
3738 /* Otherwise make space on the stack and copy the data
3739 to the address of that space. */
3741 /* Deduct words put into registers from the size we must copy. */
3744 if (GET_CODE (size) == CONST_INT)
3745 size = GEN_INT (INTVAL (size) - used);
3747 size = expand_binop (GET_MODE (size), sub_optab, size,
3748 GEN_INT (used), NULL_RTX, 0,
3752 /* Get the address of the stack space.
3753 In this case, we do not deal with EXTRA separately.
3754 A single stack adjust will do. */
3757 temp = push_block (size, extra, where_pad == downward);
3760 else if (GET_CODE (args_so_far) == CONST_INT)
3761 temp = memory_address (BLKmode,
3762 plus_constant (args_addr,
3763 skip + INTVAL (args_so_far)));
3765 temp = memory_address (BLKmode,
3766 plus_constant (gen_rtx_PLUS (Pmode,
3771 if (!ACCUMULATE_OUTGOING_ARGS)
3773 /* If the source is referenced relative to the stack pointer,
3774 copy it to another register to stabilize it. We do not need
3775 to do this if we know that we won't be changing sp. */
3777 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3778 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3779 temp = copy_to_reg (temp);
3782 target = gen_rtx_MEM (BLKmode, temp);
3784 /* We do *not* set_mem_attributes here, because incoming arguments
3785 may overlap with sibling call outgoing arguments and we cannot
3786 allow reordering of reads from function arguments with stores
3787 to outgoing arguments of sibling calls. We do, however, want
3788 to record the alignment of the stack slot. */
3789 /* ALIGN may well be better aligned than TYPE, e.g. due to
3790 PARM_BOUNDARY. Assume the caller isn't lying. */
3791 set_mem_align (target, align);
3793 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3796 else if (partial > 0)
3798 /* Scalar partly in registers. */
3800 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3803 /* # bytes of start of argument
3804 that we must make space for but need not store. */
3805 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3806 int args_offset = INTVAL (args_so_far);
3809 /* Push padding now if padding above and stack grows down,
3810 or if padding below and stack grows up.
3811 But if space already allocated, this has already been done. */
3812 if (extra && args_addr == 0
3813 && where_pad != none && where_pad != stack_direction)
3814 anti_adjust_stack (GEN_INT (extra));
3816 /* If we make space by pushing it, we might as well push
3817 the real data. Otherwise, we can leave OFFSET nonzero
3818 and leave the space uninitialized. */
3822 /* Now NOT_STACK gets the number of words that we don't need to
3823 allocate on the stack. Convert OFFSET to words too. */
3824 not_stack = (partial - offset) / UNITS_PER_WORD;
3825 offset /= UNITS_PER_WORD;
3827 /* If the partial register-part of the arg counts in its stack size,
3828 skip the part of stack space corresponding to the registers.
3829 Otherwise, start copying to the beginning of the stack space,
3830 by setting SKIP to 0. */
3831 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3833 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3834 x = validize_mem (force_const_mem (mode, x));
3836 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3837 SUBREGs of such registers are not allowed. */
3838 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3839 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3840 x = copy_to_reg (x);
3842 /* Loop over all the words allocated on the stack for this arg. */
3843 /* We can do it by words, because any scalar bigger than a word
3844 has a size that is a multiple of a word. */
3845 #ifndef PUSH_ARGS_REVERSED
3846 for (i = not_stack; i < size; i++)
3848 for (i = size - 1; i >= not_stack; i--)
3850 if (i >= not_stack + offset)
3851 emit_push_insn (operand_subword_force (x, i, mode),
3852 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3854 GEN_INT (args_offset + ((i - not_stack + skip)
3856 reg_parm_stack_space, alignment_pad);
3863 /* Push padding now if padding above and stack grows down,
3864 or if padding below and stack grows up.
3865 But if space already allocated, this has already been done. */
3866 if (extra && args_addr == 0
3867 && where_pad != none && where_pad != stack_direction)
3868 anti_adjust_stack (GEN_INT (extra));
3870 #ifdef PUSH_ROUNDING
3871 if (args_addr == 0 && PUSH_ARGS)
3872 emit_single_push_insn (mode, x, type);
3876 if (GET_CODE (args_so_far) == CONST_INT)
3878 = memory_address (mode,
3879 plus_constant (args_addr,
3880 INTVAL (args_so_far)));
3882 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3884 dest = gen_rtx_MEM (mode, addr);
3886 /* We do *not* set_mem_attributes here, because incoming arguments
3887 may overlap with sibling call outgoing arguments and we cannot
3888 allow reordering of reads from function arguments with stores
3889 to outgoing arguments of sibling calls. We do, however, want
3890 to record the alignment of the stack slot. */
3891 /* ALIGN may well be better aligned than TYPE, e.g. due to
3892 PARM_BOUNDARY. Assume the caller isn't lying. */
3893 set_mem_align (dest, align);
3895 emit_move_insn (dest, x);
3899 /* If part should go in registers, copy that part
3900 into the appropriate registers. Do this now, at the end,
3901 since mem-to-mem copies above may do function calls. */
3902 if (partial > 0 && reg != 0)
3904 /* Handle calls that pass values in multiple non-contiguous locations.
3905 The Irix 6 ABI has examples of this. */
3906 if (GET_CODE (reg) == PARALLEL)
3907 emit_group_load (reg, x, type, -1);
3910 gcc_assert (partial % UNITS_PER_WORD == 0);
3911 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3915 if (extra && args_addr == 0 && where_pad == stack_direction)
3916 anti_adjust_stack (GEN_INT (extra));
3918 if (alignment_pad && args_addr == 0)
3919 anti_adjust_stack (alignment_pad);
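/* A hedged sketch, not from the original source, of a minimal call to
   emit_push_insn: push a word-sized value with no partial-register
   component and no preallocated argument block.  The helper name and all
   argument choices are hypothetical.  */
#if 0
static void
example_push_word (rtx x)
{
  emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX,
                  GET_MODE_ALIGNMENT (word_mode), 0 /* partial */,
                  NULL_RTX /* reg */, 0 /* extra */,
                  NULL_RTX /* args_addr */, const0_rtx /* args_so_far */,
                  0 /* reg_parm_stack_space */, NULL_RTX /* alignment_pad */);
}
#endif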
3922 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3926 get_subtarget (rtx x)
3930 /* Only registers can be subtargets. */
3932 /* Don't use hard regs to avoid extending their life. */
3933 || REGNO (x) < FIRST_PSEUDO_REGISTER
3937 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3938 FIELD is a bitfield. Returns true if the optimization was successful,
3939 and there's nothing else to do. */
3942 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3943 unsigned HOST_WIDE_INT bitpos,
3944 enum machine_mode mode1, rtx str_rtx,
3947 enum machine_mode str_mode = GET_MODE (str_rtx);
3948 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3953 if (mode1 != VOIDmode
3954 || bitsize >= BITS_PER_WORD
3955 || str_bitsize > BITS_PER_WORD
3956 || TREE_SIDE_EFFECTS (to)
3957 || TREE_THIS_VOLATILE (to))
3961 if (!BINARY_CLASS_P (src)
3962 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3965 op0 = TREE_OPERAND (src, 0);
3966 op1 = TREE_OPERAND (src, 1);
3969 if (!operand_equal_p (to, op0, 0))
3972 if (MEM_P (str_rtx))
3974 unsigned HOST_WIDE_INT offset1;
3976 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3977 str_mode = word_mode;
3978 str_mode = get_best_mode (bitsize, bitpos,
3979 MEM_ALIGN (str_rtx), str_mode, 0);
3980 if (str_mode == VOIDmode)
3982 str_bitsize = GET_MODE_BITSIZE (str_mode);
3985 bitpos %= str_bitsize;
3986 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3987 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3989 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3992 /* If the bit field covers the whole REG/MEM, store_field
3993 will likely generate better code. */
3994 if (bitsize >= str_bitsize)
3997 /* We can't handle fields split across multiple entities. */
3998 if (bitpos + bitsize > str_bitsize)
4001 if (BYTES_BIG_ENDIAN)
4002 bitpos = str_bitsize - bitpos - bitsize;
4004 switch (TREE_CODE (src))
4008 /* For now, just optimize the case of the topmost bitfield
4009 where we don't need to do any masking and also
4010 1-bit bitfields where xor can be used.
4011 We might win by one instruction for the other bitfields
4012 too if insv/extv instructions aren't used, so that
4013 can be added later. */
4014 if (bitpos + bitsize != str_bitsize
4015 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4018 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4019 value = convert_modes (str_mode,
4020 TYPE_MODE (TREE_TYPE (op1)), value,
4021 TYPE_UNSIGNED (TREE_TYPE (op1)));
4023 /* We may be accessing data outside the field, which means
4024 we can alias adjacent data. */
4025 if (MEM_P (str_rtx))
4027 str_rtx = shallow_copy_rtx (str_rtx);
4028 set_mem_alias_set (str_rtx, 0);
4029 set_mem_expr (str_rtx, 0);
4032 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4033 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4035 value = expand_and (str_mode, value, const1_rtx, NULL);
4038 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4039 build_int_cst (NULL_TREE, bitpos),
4041 result = expand_binop (str_mode, binop, str_rtx,
4042 value, str_rtx, 1, OPTAB_WIDEN);
4043 if (result != str_rtx)
4044 emit_move_insn (str_rtx, result);
4049 if (TREE_CODE (op1) != INTEGER_CST)
4051 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4052 value = convert_modes (GET_MODE (str_rtx),
4053 TYPE_MODE (TREE_TYPE (op1)), value,
4054 TYPE_UNSIGNED (TREE_TYPE (op1)));
4056 /* We may be accessing data outside the field, which means
4057 we can alias adjacent data. */
4058 if (MEM_P (str_rtx))
4060 str_rtx = shallow_copy_rtx (str_rtx);
4061 set_mem_alias_set (str_rtx, 0);
4062 set_mem_expr (str_rtx, 0);
4065 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4066 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4068 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4070 value = expand_and (GET_MODE (str_rtx), value, mask,
4073 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4074 build_int_cst (NULL_TREE, bitpos),
4076 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4077 value, str_rtx, 1, OPTAB_WIDEN);
4078 if (result != str_rtx)
4079 emit_move_insn (str_rtx, result);
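/* Illustrative source-level example, not from the original file: for an
   assignment such as "s.flag |= 1", where flag is a 1-bit field, the code
   above emits a single IOR (or XOR for ^=) on the word containing the
   field instead of an extract/modify/insert sequence.  */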
4090 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4091 is true, try generating a nontemporal store. */
4094 expand_assignment (tree to, tree from, bool nontemporal)
4099 /* Don't crash if the lhs of the assignment was erroneous. */
4100 if (TREE_CODE (to) == ERROR_MARK)
4102 result = expand_normal (from);
4106 /* Optimize away no-op moves without side-effects. */
4107 if (operand_equal_p (to, from, 0))
4110 /* Assignment of a structure component needs special treatment
4111 if the structure component's rtx is not simply a MEM.
4112 Assignment of an array element at a constant index, and assignment of
4113 an array element in an unaligned packed structure field, have the same problem. */
4115 if (handled_component_p (to)
4116 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4118 enum machine_mode mode1;
4119 HOST_WIDE_INT bitsize, bitpos;
4126 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4127 &unsignedp, &volatilep, true);
4129 /* If we are going to use store_bit_field and extract_bit_field,
4130 make sure to_rtx will be safe for multiple use. */
4132 to_rtx = expand_normal (tem);
4138 if (!MEM_P (to_rtx))
4140 /* We can get constant negative offsets into arrays with broken
4141 user code. Translate this to a trap instead of ICEing. */
4142 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4143 expand_builtin_trap ();
4144 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4147 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4148 #ifdef POINTERS_EXTEND_UNSIGNED
4149 if (GET_MODE (offset_rtx) != Pmode)
4150 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4152 if (GET_MODE (offset_rtx) != ptr_mode)
4153 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4156 /* A constant address in TO_RTX can have VOIDmode; we must not try
4157 to call force_reg for that case, so avoid it.  */
4159 && GET_MODE (to_rtx) == BLKmode
4160 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4162 && (bitpos % bitsize) == 0
4163 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4164 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4166 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4170 to_rtx = offset_address (to_rtx, offset_rtx,
4171 highest_pow2_factor_for_target (to,
4175 /* Handle expand_expr of a complex value returning a CONCAT. */
4176 if (GET_CODE (to_rtx) == CONCAT)
4178 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4180 gcc_assert (bitpos == 0);
4181 result = store_expr (from, to_rtx, false, nontemporal);
4185 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4186 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4194 /* If the field is at offset zero, we could have been given the
4195 DECL_RTX of the parent struct. Don't munge it. */
4196 to_rtx = shallow_copy_rtx (to_rtx);
4198 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4200 /* Deal with volatile and readonly fields. The former is only
4201 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4203 MEM_VOLATILE_P (to_rtx) = 1;
4204 if (component_uses_parent_alias_set (to))
4205 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4208 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4212 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4213 TREE_TYPE (tem), get_alias_set (to),
4218 preserve_temp_slots (result);
4224 /* If the rhs is a function call and its value is not an aggregate,
4225 call the function before we start to compute the lhs.
4226 This is needed for correct code for cases such as
4227 val = setjmp (buf) on machines where reference to val
4228 requires loading up part of an address in a separate insn.
4230 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4231 since it might be a promoted variable where the zero- or sign-extension
4232 needs to be done. Handling this in the normal way is safe because no
4233 computation is done before the call. */
4234 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4235 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4236 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4237 && REG_P (DECL_RTL (to))))
4242 value = expand_normal (from);
4244 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4246 /* Handle calls that return values in multiple non-contiguous locations.
4247 The Irix 6 ABI has examples of this. */
4248 if (GET_CODE (to_rtx) == PARALLEL)
4249 emit_group_load (to_rtx, value, TREE_TYPE (from),
4250 int_size_in_bytes (TREE_TYPE (from)));
4251 else if (GET_MODE (to_rtx) == BLKmode)
4252 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4255 if (POINTER_TYPE_P (TREE_TYPE (to)))
4256 value = convert_memory_address (GET_MODE (to_rtx), value);
4257 emit_move_insn (to_rtx, value);
4259 preserve_temp_slots (to_rtx);
4265 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4266 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4269 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4271 /* Don't move directly into a return register. */
4272 if (TREE_CODE (to) == RESULT_DECL
4273 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4278 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4280 if (GET_CODE (to_rtx) == PARALLEL)
4281 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4282 int_size_in_bytes (TREE_TYPE (from)));
4284 emit_move_insn (to_rtx, temp);
4286 preserve_temp_slots (to_rtx);
4292 /* In case we are returning the contents of an object which overlaps
4293 the place the value is being stored, use a safe function when copying
4294 a value through a pointer into a structure value return block. */
4295 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4296 && current_function_returns_struct
4297 && !current_function_returns_pcc_struct)
4302 size = expr_size (from);
4303 from_rtx = expand_normal (from);
4305 emit_library_call (memmove_libfunc, LCT_NORMAL,
4306 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4307 XEXP (from_rtx, 0), Pmode,
4308 convert_to_mode (TYPE_MODE (sizetype),
4309 size, TYPE_UNSIGNED (sizetype)),
4310 TYPE_MODE (sizetype));
4312 preserve_temp_slots (to_rtx);
4318 /* Compute FROM and store the value in the rtx we got. */
4321 result = store_expr (from, to_rtx, 0, nontemporal);
4322 preserve_temp_slots (result);
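/* The helper below implements the NONTEMPORAL path.  A sketch of the
   intended situation (illustrative, not from the original sources):

       void fill (float *a, long n)
       {
         for (long i = 0; i < n; i++)
           a[i] = 0.0f;
       }

   When the stored data will not be re-read soon, targets providing a
   storent pattern (e.g. x86 movnti/movntps) can store around the
   cache instead of polluting it.  */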
4328 /* Emits a nontemporal store insn that moves FROM to TO.  Returns true if this
4329 succeeded, false otherwise. */
4332 emit_storent_insn (rtx to, rtx from)
4334 enum machine_mode mode = GET_MODE (to), imode;
4335 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4338 if (code == CODE_FOR_nothing)
4341 imode = insn_data[code].operand[0].mode;
4342 if (!insn_data[code].operand[0].predicate (to, imode))
4345 imode = insn_data[code].operand[1].mode;
4346 if (!insn_data[code].operand[1].predicate (from, imode))
4348 from = copy_to_mode_reg (imode, from);
4349 if (!insn_data[code].operand[1].predicate (from, imode))
4353 pattern = GEN_FCN (code) (to, from);
4354 if (pattern == NULL_RTX)
4357 emit_insn (pattern);
4361 /* Generate code for computing expression EXP,
4362 and storing the value into TARGET.
4364 If the mode is BLKmode then we may return TARGET itself.
4365 It turns out that in BLKmode it doesn't cause a problem,
4366 because C has no operators that could combine two different
4367 assignments into the same BLKmode object with different values
4368 with no sequence point.  Will other languages need this to be more thorough?
4371 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4372 stack, and block moves may need to be treated specially.
4374 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4377 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4380 rtx alt_rtl = NULL_RTX;
4381 int dont_return_target = 0;
4383 if (VOID_TYPE_P (TREE_TYPE (exp)))
4385 /* C++ can generate ?: expressions with a throw expression in one
4386 branch and an rvalue in the other. Here, we resolve attempts to
4387 store the throw expression's nonexistent result. */
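/* For instance (illustrative): given C++ source such as

       x = ok ? f () : throw 0;

   the "throw 0" arm has type void, so there is no value to store;
   we expand it for its side effects only.  */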
4388 gcc_assert (!call_param_p);
4389 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4392 if (TREE_CODE (exp) == COMPOUND_EXPR)
4394 /* Perform first part of compound expression, then assign from second part.  */
4396 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4397 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4398 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4401 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4403 /* For conditional expression, get safe form of the target. Then
4404 test the condition, doing the appropriate assignment on either
4405 side. This avoids the creation of unnecessary temporaries.
4406 For non-BLKmode, it is more efficient not to do this. */
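/* Sketch (illustrative): for a BLKmode assignment such as

       struct big { char buf[256]; } a, b, c;
       ...
       c = cond ? a : b;

   we jump on !cond, store A into C, jump past the other arm, and
   store B into C there, instead of first materializing the selected
   arm in a temporary of its own.  */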
4408 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4410 do_pending_stack_adjust ();
4412 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4413 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4415 emit_jump_insn (gen_jump (lab2));
4418 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4425 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4426 /* If this is a scalar in a register that is stored in a wider mode
4427 than the declared mode, compute the result into its declared mode
4428 and then convert to the wider mode.  Our value is the computed expression.  */
4431 rtx inner_target = 0;
4433 /* We can do the conversion inside EXP, which will often result
4434 in some optimizations. Do the conversion in two steps: first
4435 change the signedness, if needed, then the extension.  But don't
4436 do this if the type of EXP is a subtype of something else
4437 since then the conversion might involve more than just
4438 converting modes. */
4439 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4440 && TREE_TYPE (TREE_TYPE (exp)) == 0
4441 && (!lang_hooks.reduce_bit_field_operations
4442 || (GET_MODE_PRECISION (GET_MODE (target))
4443 == TYPE_PRECISION (TREE_TYPE (exp)))))
4445 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4446 != SUBREG_PROMOTED_UNSIGNED_P (target))
4448 /* Some types, e.g. Fortran's logical*4, won't have a signed
4449 version, so use the mode instead. */
4451 = (signed_or_unsigned_type_for
4452 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4454 ntype = lang_hooks.types.type_for_mode
4455 (TYPE_MODE (TREE_TYPE (exp)),
4456 SUBREG_PROMOTED_UNSIGNED_P (target));
4458 exp = fold_convert (ntype, exp);
4461 exp = fold_convert (lang_hooks.types.type_for_mode
4462 (GET_MODE (SUBREG_REG (target)),
4463 SUBREG_PROMOTED_UNSIGNED_P (target)),
4466 inner_target = SUBREG_REG (target);
4469 temp = expand_expr (exp, inner_target, VOIDmode,
4470 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4472 /* If TEMP is a VOIDmode constant, use convert_modes to make
4473 sure that we properly convert it. */
4474 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4476 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4477 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4478 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4479 GET_MODE (target), temp,
4480 SUBREG_PROMOTED_UNSIGNED_P (target));
4483 convert_move (SUBREG_REG (target), temp,
4484 SUBREG_PROMOTED_UNSIGNED_P (target));
4488 else if (TREE_CODE (exp) == STRING_CST
4489 && !nontemporal && !call_param_p
4490 && TREE_STRING_LENGTH (exp) > 0
4491 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4493 /* Optimize initialization of an array with a STRING_CST. */
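/* Illustrative case (an assumption for exposition): for

       char buf[32] = "abc";

   EXP_LEN is 32 while the string itself contributes only 4 bytes
   (including the terminating NUL), so we store that short prefix
   (rounded up to a convenient piece size) by pieces and clear the
   remainder with clear_storage rather than copying a full 32-byte
   constant block.  */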
4494 HOST_WIDE_INT exp_len, str_copy_len;
4497 exp_len = int_expr_size (exp);
4501 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4502 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4505 str_copy_len = TREE_STRING_LENGTH (exp);
4506 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4508 str_copy_len += STORE_MAX_PIECES - 1;
4509 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4511 str_copy_len = MIN (str_copy_len, exp_len);
4512 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4513 (void *) TREE_STRING_POINTER (exp),
4514 MEM_ALIGN (target), false))
4519 dest_mem = store_by_pieces (dest_mem,
4520 str_copy_len, builtin_strncpy_read_str,
4521 (void *) TREE_STRING_POINTER (exp),
4522 MEM_ALIGN (target), false,
4523 exp_len > str_copy_len ? 1 : 0);
4524 if (exp_len > str_copy_len)
4525 clear_storage (dest_mem, GEN_INT (exp_len - str_copy_len),
4534 /* If we want to use a nontemporal store, force the value into a register first.  */
4536 tmp_target = nontemporal ? NULL_RTX : target;
4537 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4539 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4541 /* Return TARGET if it's a specified hardware register.
4542 If TARGET is a volatile mem ref, either return TARGET
4543 or return a reg copied *from* TARGET; ANSI requires this.
4545 Otherwise, if TEMP is not TARGET, return TEMP
4546 if it is constant (for efficiency),
4547 or if we really want the correct value. */
4548 if (!(target && REG_P (target)
4549 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4550 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4551 && ! rtx_equal_p (temp, target)
4552 && CONSTANT_P (temp))
4553 dont_return_target = 1;
4556 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4557 the same as that of TARGET, adjust the constant. This is needed, for
4558 example, in case it is a CONST_DOUBLE and we want only a word-sized value.  */
4560 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4561 && TREE_CODE (exp) != ERROR_MARK
4562 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4563 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4564 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4566 /* If value was not generated in the target, store it there.
4567 Convert the value to TARGET's type first if necessary and emit the
4568 pending incrementations that have been queued when expanding EXP.
4569 Note that we cannot emit the whole queue blindly because this will
4570 effectively disable the POST_INC optimization later.
4572 If TEMP and TARGET compare equal according to rtx_equal_p, but
4573 one or both of them are volatile memory refs, we have to distinguish two cases:
4575 - expand_expr has used TARGET. In this case, we must not generate
4576 another copy.  This can be detected by TARGET being equal according to == .
4578 - expand_expr has not used TARGET - that means that the source just
4579 happens to have the same RTX form. Since temp will have been created
4580 by expand_expr, it will compare unequal according to == .
4581 We must generate a copy in this case, to reach the correct number
4582 of volatile memory references. */
4584 if ((! rtx_equal_p (temp, target)
4585 || (temp != target && (side_effects_p (temp)
4586 || side_effects_p (target))))
4587 && TREE_CODE (exp) != ERROR_MARK
4588 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4589 but TARGET is not a valid memory reference, TEMP will differ
4590 from TARGET although it is really the same location. */
4591 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4592 /* If there's nothing to copy, don't bother. Don't call
4593 expr_size unless necessary, because some front-ends' (C++)
4594 expr_size hook must not be given objects that are not
4595 supposed to be bit-copied or bit-initialized. */
4596 && expr_size (exp) != const0_rtx)
4598 if (GET_MODE (temp) != GET_MODE (target)
4599 && GET_MODE (temp) != VOIDmode)
4601 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4602 if (dont_return_target)
4604 /* In this case, we will return TEMP,
4605 so make sure it has the proper mode.
4606 But don't forget to store the value into TARGET. */
4607 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4608 emit_move_insn (target, temp);
4610 else if (GET_MODE (target) == BLKmode)
4611 emit_block_move (target, temp, expr_size (exp),
4613 ? BLOCK_OP_CALL_PARM
4614 : BLOCK_OP_NORMAL));
4616 convert_move (target, temp, unsignedp);
4619 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4621 /* Handle copying a string constant into an array. The string
4622 constant may be shorter than the array. So copy just the string's
4623 actual length, and clear the rest. First get the size of the data
4624 type of the string, which is actually the size of the target. */
4625 rtx size = expr_size (exp);
4627 if (GET_CODE (size) == CONST_INT
4628 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4629 emit_block_move (target, temp, size,
4631 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4634 /* Compute the size of the data to copy from the string. */
4636 = size_binop (MIN_EXPR,
4637 make_tree (sizetype, size),
4638 size_int (TREE_STRING_LENGTH (exp)));
4640 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4642 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4645 /* Copy that much. */
4646 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4647 TYPE_UNSIGNED (sizetype));
4648 emit_block_move (target, temp, copy_size_rtx,
4650 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4652 /* Figure out how much is left in TARGET that we have to clear.
4653 Do all calculations in ptr_mode. */
4654 if (GET_CODE (copy_size_rtx) == CONST_INT)
4656 size = plus_constant (size, -INTVAL (copy_size_rtx));
4657 target = adjust_address (target, BLKmode,
4658 INTVAL (copy_size_rtx));
4662 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4663 copy_size_rtx, NULL_RTX, 0,
4666 #ifdef POINTERS_EXTEND_UNSIGNED
4667 if (GET_MODE (copy_size_rtx) != Pmode)
4668 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4669 TYPE_UNSIGNED (sizetype));
4672 target = offset_address (target, copy_size_rtx,
4673 highest_pow2_factor (copy_size));
4674 label = gen_label_rtx ();
4675 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4676 GET_MODE (size), 0, label);
4679 if (size != const0_rtx)
4680 clear_storage (target, size, BLOCK_OP_NORMAL);
4686 /* Handle calls that return values in multiple non-contiguous locations.
4687 The Irix 6 ABI has examples of this. */
4688 else if (GET_CODE (target) == PARALLEL)
4689 emit_group_load (target, temp, TREE_TYPE (exp),
4690 int_size_in_bytes (TREE_TYPE (exp)));
4691 else if (GET_MODE (temp) == BLKmode)
4692 emit_block_move (target, temp, expr_size (exp),
4694 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4695 else if (nontemporal
4696 && emit_storent_insn (target, temp))
4697 /* If we managed to emit a nontemporal store, there is nothing else to do.  */
4702 temp = force_operand (temp, target);
4704 emit_move_insn (target, temp);
4711 /* Helper for categorize_ctor_elements. Identical interface. */
4714 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4715 HOST_WIDE_INT *p_elt_count,
4718 unsigned HOST_WIDE_INT idx;
4719 HOST_WIDE_INT nz_elts, elt_count;
4720 tree value, purpose;
4722 /* Whether CTOR is a valid constant initializer, in accordance with what
4723 initializer_constant_valid_p does. If inferred from the constructor
4724 elements, true until proven otherwise. */
4725 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4726 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4731 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4736 if (TREE_CODE (purpose) == RANGE_EXPR)
4738 tree lo_index = TREE_OPERAND (purpose, 0);
4739 tree hi_index = TREE_OPERAND (purpose, 1);
4741 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4742 mult = (tree_low_cst (hi_index, 1)
4743 - tree_low_cst (lo_index, 1) + 1);
4746 switch (TREE_CODE (value))
4750 HOST_WIDE_INT nz = 0, ic = 0;
4753 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4755 nz_elts += mult * nz;
4756 elt_count += mult * ic;
4758 if (const_from_elts_p && const_p)
4759 const_p = const_elt_p;
4765 if (!initializer_zerop (value))
4771 nz_elts += mult * TREE_STRING_LENGTH (value);
4772 elt_count += mult * TREE_STRING_LENGTH (value);
4776 if (!initializer_zerop (TREE_REALPART (value)))
4778 if (!initializer_zerop (TREE_IMAGPART (value)))
4786 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4788 if (!initializer_zerop (TREE_VALUE (v)))
4799 if (const_from_elts_p && const_p)
4800 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4807 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4808 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4811 bool clear_this = true;
4813 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4815 /* We don't expect more than one element of the union to be
4816 initialized. Not sure what we should do otherwise... */
4817 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4820 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4821 CONSTRUCTOR_ELTS (ctor),
4824 /* ??? We could look at each element of the union, and find the
4825 largest element, which would avoid comparing the size of the
4826 initialized element against any tail padding in the union.
4827 Doesn't seem worth the effort... */
4828 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4829 TYPE_SIZE (init_sub_type)) == 1)
4831 /* And now we have to find out if the element itself is fully
4832 constructed. E.g. for union { struct { int a, b; } s; } u
4833 = { .s = { .a = 1 } }. */
4834 if (elt_count == count_type_elements (init_sub_type, false))
4839 *p_must_clear = clear_this;
4842 *p_nz_elts += nz_elts;
4843 *p_elt_count += elt_count;
4848 /* Examine CTOR to discover:
4849 * how many scalar fields are set to nonzero values,
4850 and place it in *P_NZ_ELTS;
4851 * how many scalar fields in total are in CTOR,
4852 and place it in *P_ELT_COUNT.
4853 * if a type is a union, and the initializer from the constructor
4854 is not the largest element in the union, then set *P_MUST_CLEAR.
4856 Return whether or not CTOR is a valid static constant initializer, the same
4857 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4860 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4861 HOST_WIDE_INT *p_elt_count,
4866 *p_must_clear = false;
4869 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
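/* Worked example (illustrative, not from the original sources): for

       struct pt { int x, y; };
       static struct pt v[2] = { { 1, 2 }, { 0, 0 } };

   the walk above reports *P_NZ_ELTS == 2 (the nonzero scalars 1 and 2)
   and *P_ELT_COUNT == 4 (four scalars appear in the constructor), and
   the initializer qualifies as a valid static constant.  */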
4872 /* Count the number of scalars in TYPE.  Return -1 on overflow or if the
4873 type is variable-sized.  If ALLOW_FLEXARR is true, don't count a flexible
4874 array member at the end of the structure. */
4877 count_type_elements (const_tree type, bool allow_flexarr)
4879 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4880 switch (TREE_CODE (type))
4884 tree telts = array_type_nelts (type);
4885 if (telts && host_integerp (telts, 1))
4887 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4888 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4891 else if (max / n > m)
4899 HOST_WIDE_INT n = 0, t;
4902 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4903 if (TREE_CODE (f) == FIELD_DECL)
4905 t = count_type_elements (TREE_TYPE (f), false);
4908 /* Check for structures with flexible array member. */
4909 tree tf = TREE_TYPE (f);
4911 && TREE_CHAIN (f) == NULL
4912 && TREE_CODE (tf) == ARRAY_TYPE
4914 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4915 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4916 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4917 && int_size_in_bytes (type) >= 0)
4929 case QUAL_UNION_TYPE:
4931 /* Ho hum. How in the world do we guess here? Clearly it isn't
4932 right to count the fields. Guess based on the number of words. */
4933 HOST_WIDE_INT n = int_size_in_bytes (type);
4936 return n / UNITS_PER_WORD;
4943 return TYPE_VECTOR_SUBPARTS (type);
4951 case REFERENCE_TYPE:
4963 /* Return 1 if EXP contains mostly (3/4) zeros. */
4966 mostly_zeros_p (const_tree exp)
4968 if (TREE_CODE (exp) == CONSTRUCTOR)
4971 HOST_WIDE_INT nz_elts, count, elts;
4974 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4978 elts = count_type_elements (TREE_TYPE (exp), false);
4980 return nz_elts < elts / 4;
4983 return initializer_zerop (exp);
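/* Worked example (illustrative): for

       int a[8] = { [0] = 5 };

   categorize_ctor_elements finds one nonzero scalar while the type
   has eight, and 1 < 8 / 4, so the initializer counts as mostly zero;
   callers such as store_constructor will then clear the whole object
   first and store only the nonzero entry.  */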
4986 /* Return 1 if EXP contains all zeros. */
4989 all_zeros_p (const_tree exp)
4991 if (TREE_CODE (exp) == CONSTRUCTOR)
4994 HOST_WIDE_INT nz_elts, count;
4997 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4998 return nz_elts == 0;
5001 return initializer_zerop (exp);
5004 /* Helper function for store_constructor.
5005 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5006 TYPE is the type of the CONSTRUCTOR, not the element type.
5007 CLEARED is as for store_constructor.
5008 ALIAS_SET is the alias set to use for any stores.
5010 This provides a recursive shortcut back to store_constructor when it isn't
5011 necessary to go through store_field. This is so that we can pass through
5012 the cleared field to let store_constructor know that we may not have to
5013 clear a substructure if the outer structure has already been cleared. */
5016 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5017 HOST_WIDE_INT bitpos, enum machine_mode mode,
5018 tree exp, tree type, int cleared,
5019 alias_set_type alias_set)
5021 if (TREE_CODE (exp) == CONSTRUCTOR
5022 /* We can only call store_constructor recursively if the size and
5023 bit position are on a byte boundary. */
5024 && bitpos % BITS_PER_UNIT == 0
5025 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5026 /* If we have a nonzero bitpos for a register target, then we just
5027 let store_field do the bitfield handling. This is unlikely to
5028 generate unnecessary clear instructions anyway. */
5029 && (bitpos == 0 || MEM_P (target)))
5033 = adjust_address (target,
5034 GET_MODE (target) == BLKmode
5036 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5037 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5040 /* Update the alias set, if required. */
5041 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5042 && MEM_ALIAS_SET (target) != 0)
5044 target = copy_rtx (target);
5045 set_mem_alias_set (target, alias_set);
5048 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5051 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5054 /* Store the value of constructor EXP into the rtx TARGET.
5055 TARGET is either a REG or a MEM; we know it cannot conflict, since
5056 safe_from_p has been called.
5057 CLEARED is true if TARGET is known to have been zero'd.
5058 SIZE is the number of bytes of TARGET we are allowed to modify: this
5059 may not be the same as the size of EXP if we are assigning to a field
5060 which has been packed to exclude padding bits. */
5063 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5065 tree type = TREE_TYPE (exp);
5066 #ifdef WORD_REGISTER_OPERATIONS
5067 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5070 switch (TREE_CODE (type))
5074 case QUAL_UNION_TYPE:
5076 unsigned HOST_WIDE_INT idx;
5079 /* If size is zero or the target is already cleared, do nothing. */
5080 if (size == 0 || cleared)
5082 /* We either clear the aggregate or indicate the value is dead. */
5083 else if ((TREE_CODE (type) == UNION_TYPE
5084 || TREE_CODE (type) == QUAL_UNION_TYPE)
5085 && ! CONSTRUCTOR_ELTS (exp))
5086 /* If the constructor is empty, clear the union. */
5088 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5092 /* If we are building a static constructor into a register,
5093 set the initial value as zero so we can fold the value into
5094 a constant. But if more than one register is involved,
5095 this probably loses. */
5096 else if (REG_P (target) && TREE_STATIC (exp)
5097 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5099 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5103 /* If the constructor has fewer fields than the structure or
5104 if we are initializing the structure to mostly zeros, clear
5105 the whole structure first. Don't do this if TARGET is a
5106 register whose mode size isn't equal to SIZE since
5107 clear_storage can't handle this case. */
5109 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5110 != fields_length (type))
5111 || mostly_zeros_p (exp))
5113 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5116 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5120 if (REG_P (target) && !cleared)
5121 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5123 /* Store each element of the constructor into the
5124 corresponding field of TARGET. */
5125 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5127 enum machine_mode mode;
5128 HOST_WIDE_INT bitsize;
5129 HOST_WIDE_INT bitpos = 0;
5131 rtx to_rtx = target;
5133 /* Just ignore missing fields. We cleared the whole
5134 structure, above, if any fields are missing. */
5138 if (cleared && initializer_zerop (value))
5141 if (host_integerp (DECL_SIZE (field), 1))
5142 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5146 mode = DECL_MODE (field);
5147 if (DECL_BIT_FIELD (field))
5150 offset = DECL_FIELD_OFFSET (field);
5151 if (host_integerp (offset, 0)
5152 && host_integerp (bit_position (field), 0))
5154 bitpos = int_bit_position (field);
5158 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5165 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5166 make_tree (TREE_TYPE (exp),
5169 offset_rtx = expand_normal (offset);
5170 gcc_assert (MEM_P (to_rtx));
5172 #ifdef POINTERS_EXTEND_UNSIGNED
5173 if (GET_MODE (offset_rtx) != Pmode)
5174 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5176 if (GET_MODE (offset_rtx) != ptr_mode)
5177 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5180 to_rtx = offset_address (to_rtx, offset_rtx,
5181 highest_pow2_factor (offset));
5184 #ifdef WORD_REGISTER_OPERATIONS
5185 /* If this initializes a field that is smaller than a
5186 word, at the start of a word, try to widen it to a full
5187 word. This special case allows us to output C++ member
5188 function initializations in a form that the optimizers can understand.  */
5191 && bitsize < BITS_PER_WORD
5192 && bitpos % BITS_PER_WORD == 0
5193 && GET_MODE_CLASS (mode) == MODE_INT
5194 && TREE_CODE (value) == INTEGER_CST
5196 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5198 tree type = TREE_TYPE (value);
5200 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5202 type = lang_hooks.types.type_for_size
5203 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5204 value = fold_convert (type, value);
5207 if (BYTES_BIG_ENDIAN)
5209 = fold_build2 (LSHIFT_EXPR, type, value,
5210 build_int_cst (type,
5211 BITS_PER_WORD - bitsize));
5212 bitsize = BITS_PER_WORD;
5217 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5218 && DECL_NONADDRESSABLE_P (field))
5220 to_rtx = copy_rtx (to_rtx);
5221 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5224 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5225 value, type, cleared,
5226 get_alias_set (TREE_TYPE (field)));
5233 unsigned HOST_WIDE_INT i;
5236 tree elttype = TREE_TYPE (type);
5238 HOST_WIDE_INT minelt = 0;
5239 HOST_WIDE_INT maxelt = 0;
5241 domain = TYPE_DOMAIN (type);
5242 const_bounds_p = (TYPE_MIN_VALUE (domain)
5243 && TYPE_MAX_VALUE (domain)
5244 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5245 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5247 /* If we have constant bounds for the range of the type, get them. */
5250 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5251 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5254 /* If the constructor has fewer elements than the array, clear
5255 the whole array first.  Similarly if this is a static
5256 constructor of a non-BLKmode object. */
5259 else if (REG_P (target) && TREE_STATIC (exp))
5263 unsigned HOST_WIDE_INT idx;
5265 HOST_WIDE_INT count = 0, zero_count = 0;
5266 need_to_clear = ! const_bounds_p;
5268 /* This loop is a more accurate version of the loop in
5269 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5270 is also needed to check for missing elements. */
5271 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5273 HOST_WIDE_INT this_node_count;
5278 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5280 tree lo_index = TREE_OPERAND (index, 0);
5281 tree hi_index = TREE_OPERAND (index, 1);
5283 if (! host_integerp (lo_index, 1)
5284 || ! host_integerp (hi_index, 1))
5290 this_node_count = (tree_low_cst (hi_index, 1)
5291 - tree_low_cst (lo_index, 1) + 1);
5294 this_node_count = 1;
5296 count += this_node_count;
5297 if (mostly_zeros_p (value))
5298 zero_count += this_node_count;
5301 /* Clear the entire array first if there are any missing
5302 elements, or if the incidence of zero elements is >= 75%.  */
5305 && (count < maxelt - minelt + 1
5306 || 4 * zero_count >= 3 * count))
5310 if (need_to_clear && size > 0)
5313 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5315 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5319 if (!cleared && REG_P (target))
5320 /* Inform later passes that the old value is dead. */
5321 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5323 /* Store each element of the constructor into the
5324 corresponding element of TARGET, determined by counting the elements.  */
5326 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5328 enum machine_mode mode;
5329 HOST_WIDE_INT bitsize;
5330 HOST_WIDE_INT bitpos;
5332 rtx xtarget = target;
5334 if (cleared && initializer_zerop (value))
5337 unsignedp = TYPE_UNSIGNED (elttype);
5338 mode = TYPE_MODE (elttype);
5339 if (mode == BLKmode)
5340 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5341 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5344 bitsize = GET_MODE_BITSIZE (mode);
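/* A RANGE_EXPR index comes from the GNU C designated-range extension;
   an illustrative source form (not taken from the original code) is

       int a[10] = { [2 ... 5] = 7 };

   Small constant ranges are unrolled into individual element stores
   below; larger or variable ranges are expanded as a real loop over
   an index variable.  */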
5346 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5348 tree lo_index = TREE_OPERAND (index, 0);
5349 tree hi_index = TREE_OPERAND (index, 1);
5350 rtx index_r, pos_rtx;
5351 HOST_WIDE_INT lo, hi, count;
5354 /* If the range is constant and "small", unroll the loop. */
5356 && host_integerp (lo_index, 0)
5357 && host_integerp (hi_index, 0)
5358 && (lo = tree_low_cst (lo_index, 0),
5359 hi = tree_low_cst (hi_index, 0),
5360 count = hi - lo + 1,
5363 || (host_integerp (TYPE_SIZE (elttype), 1)
5364 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5367 lo -= minelt; hi -= minelt;
5368 for (; lo <= hi; lo++)
5370 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5373 && !MEM_KEEP_ALIAS_SET_P (target)
5374 && TREE_CODE (type) == ARRAY_TYPE
5375 && TYPE_NONALIASED_COMPONENT (type))
5377 target = copy_rtx (target);
5378 MEM_KEEP_ALIAS_SET_P (target) = 1;
5381 store_constructor_field
5382 (target, bitsize, bitpos, mode, value, type, cleared,
5383 get_alias_set (elttype));
5388 rtx loop_start = gen_label_rtx ();
5389 rtx loop_end = gen_label_rtx ();
5392 expand_normal (hi_index);
5393 unsignedp = TYPE_UNSIGNED (domain);
5395 index = build_decl (VAR_DECL, NULL_TREE, domain);
5398 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5400 SET_DECL_RTL (index, index_r);
5401 store_expr (lo_index, index_r, 0, false);
5403 /* Build the head of the loop. */
5404 do_pending_stack_adjust ();
5405 emit_label (loop_start);
5407 /* Assign value to element index. */
5409 fold_convert (ssizetype,
5410 fold_build2 (MINUS_EXPR,
5413 TYPE_MIN_VALUE (domain)));
5416 size_binop (MULT_EXPR, position,
5417 fold_convert (ssizetype,
5418 TYPE_SIZE_UNIT (elttype)));
5420 pos_rtx = expand_normal (position);
5421 xtarget = offset_address (target, pos_rtx,
5422 highest_pow2_factor (position));
5423 xtarget = adjust_address (xtarget, mode, 0);
5424 if (TREE_CODE (value) == CONSTRUCTOR)
5425 store_constructor (value, xtarget, cleared,
5426 bitsize / BITS_PER_UNIT);
5428 store_expr (value, xtarget, 0, false);
5430 /* Generate a conditional jump to exit the loop. */
5431 exit_cond = build2 (LT_EXPR, integer_type_node,
5433 jumpif (exit_cond, loop_end);
5435 /* Update the loop counter, and jump to the head of the loop.  */
5437 expand_assignment (index,
5438 build2 (PLUS_EXPR, TREE_TYPE (index),
5439 index, integer_one_node),
5442 emit_jump (loop_start);
5444 /* Build the end of the loop. */
5445 emit_label (loop_end);
5448 else if ((index != 0 && ! host_integerp (index, 0))
5449 || ! host_integerp (TYPE_SIZE (elttype), 1))
5454 index = ssize_int (1);
5457 index = fold_convert (ssizetype,
5458 fold_build2 (MINUS_EXPR,
5461 TYPE_MIN_VALUE (domain)));
5464 size_binop (MULT_EXPR, index,
5465 fold_convert (ssizetype,
5466 TYPE_SIZE_UNIT (elttype)));
5467 xtarget = offset_address (target,
5468 expand_normal (position),
5469 highest_pow2_factor (position));
5470 xtarget = adjust_address (xtarget, mode, 0);
5471 store_expr (value, xtarget, 0, false);
5476 bitpos = ((tree_low_cst (index, 0) - minelt)
5477 * tree_low_cst (TYPE_SIZE (elttype), 1));
5479 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5481 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5482 && TREE_CODE (type) == ARRAY_TYPE
5483 && TYPE_NONALIASED_COMPONENT (type))
5485 target = copy_rtx (target);
5486 MEM_KEEP_ALIAS_SET_P (target) = 1;
5488 store_constructor_field (target, bitsize, bitpos, mode, value,
5489 type, cleared, get_alias_set (elttype));
5497 unsigned HOST_WIDE_INT idx;
5498 constructor_elt *ce;
5502 tree elttype = TREE_TYPE (type);
5503 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5504 enum machine_mode eltmode = TYPE_MODE (elttype);
5505 HOST_WIDE_INT bitsize;
5506 HOST_WIDE_INT bitpos;
5507 rtvec vector = NULL;
5510 gcc_assert (eltmode != BLKmode);
5512 n_elts = TYPE_VECTOR_SUBPARTS (type);
5513 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5515 enum machine_mode mode = GET_MODE (target);
5517 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5518 if (icode != CODE_FOR_nothing)
5522 vector = rtvec_alloc (n_elts);
5523 for (i = 0; i < n_elts; i++)
5524 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5528 /* If the constructor has fewer elements than the vector,
5529 clear the whole vector first.  Similarly if this is a static
5530 constructor of a non-BLKmode object. */
5533 else if (REG_P (target) && TREE_STATIC (exp))
5537 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5540 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5542 int n_elts_here = tree_low_cst
5543 (int_const_binop (TRUNC_DIV_EXPR,
5544 TYPE_SIZE (TREE_TYPE (value)),
5545 TYPE_SIZE (elttype), 0), 1);
5547 count += n_elts_here;
5548 if (mostly_zeros_p (value))
5549 zero_count += n_elts_here;
5552 /* Clear the entire vector first if there are any missing elements,
5553 or if the incidence of zero elements is >= 75%. */
5554 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5557 if (need_to_clear && size > 0 && !vector)
5560 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5562 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5566 /* Inform later passes that the old value is dead. */
5567 if (!cleared && !vector && REG_P (target))
5568 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5570 /* Store each element of the constructor into the corresponding
5571 element of TARGET, determined by counting the elements. */
5572 for (idx = 0, i = 0;
5573 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5574 idx++, i += bitsize / elt_size)
5576 HOST_WIDE_INT eltpos;
5577 tree value = ce->value;
5579 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5580 if (cleared && initializer_zerop (value))
5584 eltpos = tree_low_cst (ce->index, 1);
5590 /* Vector CONSTRUCTORs should only be built from smaller
5591 vectors in the case of BLKmode vectors. */
5592 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5593 RTVEC_ELT (vector, eltpos)
5594 = expand_normal (value);
5598 enum machine_mode value_mode =
5599 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5600 ? TYPE_MODE (TREE_TYPE (value))
5602 bitpos = eltpos * elt_size;
5603 store_constructor_field (target, bitsize, bitpos,
5604 value_mode, value, type,
5605 cleared, get_alias_set (elttype));
5610 emit_insn (GEN_FCN (icode)
5612 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5621 /* Store the value of EXP (an expression tree)
5622 into a subfield of TARGET which has mode MODE and occupies
5623 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5624 If MODE is VOIDmode, it means that we are storing into a bit-field.
5626 Always return const0_rtx unless we have something particular to return.
5629 TYPE is the type of the underlying object.
5631 ALIAS_SET is the alias set for the destination. This value will
5632 (in general) be different from that for TARGET, since TARGET is a
5633 reference to the containing structure.
5635 If NONTEMPORAL is true, try generating a nontemporal store. */
5638 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5639 enum machine_mode mode, tree exp, tree type,
5640 alias_set_type alias_set, bool nontemporal)
5642 HOST_WIDE_INT width_mask = 0;
5644 if (TREE_CODE (exp) == ERROR_MARK)
5647 /* If we have nothing to store, do nothing unless the expression has side-effects.  */
5650 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5651 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5652 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5654 /* If we are storing into an unaligned field of an aligned union that is
5655 in a register, we may have the mode of TARGET being an integer mode but
5656 MODE == BLKmode. In that case, get an aligned object whose size and
5657 alignment are the same as TARGET and store TARGET into it (we can avoid
5658 the store if the field being stored is the entire width of TARGET). Then
5659 call ourselves recursively to store the field into a BLKmode version of
5660 that object. Finally, load from the object into TARGET. This is not
5661 very efficient in general, but should only be slightly more expensive
5662 than the otherwise-required unaligned accesses. Perhaps this can be
5663 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5664 twice, once with emit_move_insn and once via store_field. */
5667 && (REG_P (target) || GET_CODE (target) == SUBREG))
5669 rtx object = assign_temp (type, 0, 1, 1);
5670 rtx blk_object = adjust_address (object, BLKmode, 0);
5672 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5673 emit_move_insn (object, target);
5675 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5678 emit_move_insn (target, object);
5680 /* We want to return the BLKmode version of the data. */
5684 if (GET_CODE (target) == CONCAT)
5686 /* We're storing into a struct containing a single __complex. */
5688 gcc_assert (!bitpos);
5689 return store_expr (exp, target, 0, nontemporal);
5692 /* If the structure is in a register or if the component
5693 is a bit field, we cannot use addressing to access it.
5694 Use bit-field techniques or SUBREG to store in it. */
5696 if (mode == VOIDmode
5697 || (mode != BLKmode && ! direct_store[(int) mode]
5698 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5699 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5701 || GET_CODE (target) == SUBREG
5702 /* If the field isn't aligned enough to store as an ordinary memref,
5703 store it as a bit field. */
5705 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5706 || bitpos % GET_MODE_ALIGNMENT (mode))
5707 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5708 || (bitpos % BITS_PER_UNIT != 0)))
5709 /* If the RHS and field are a constant size and the size of the
5710 RHS isn't the same size as the bitfield, we must use bitfield operations.  */
5713 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5714 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5718 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5719 implies a mask operation. If the precision is the same size as
5720 the field we're storing into, that mask is redundant. This is
5721 particularly common with bit field assignments generated by the C front end.  */
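/* For example (illustrative): for a field whose width matches no
   machine mode, as in

       struct B { unsigned int f : 24; } b;
       unsigned int x;
       ...
       b.f = x;

   the front end converts X to the 24-bit field type, a NOP_EXPR whose
   implied mask keeps exactly BITSIZE == 24 bits; since the bit-field
   store masks to the field width anyway, we can expand the unconverted
   value directly.  */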
5723 if (TREE_CODE (exp) == NOP_EXPR)
5725 tree type = TREE_TYPE (exp);
5726 if (INTEGRAL_TYPE_P (type)
5727 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5728 && bitsize == TYPE_PRECISION (type))
5730 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5731 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5732 exp = TREE_OPERAND (exp, 0);
5736 temp = expand_normal (exp);
5738 /* If BITSIZE is narrower than the size of the type of EXP
5739 we will be narrowing TEMP. Normally, what's wanted are the
5740 low-order bits. However, if EXP's type is a record and this is
5741 a big-endian machine, we want the upper BITSIZE bits.  */
5742 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5743 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5744 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5745 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5746 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5750 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
5752 if (mode != VOIDmode && mode != BLKmode
5753 && mode != TYPE_MODE (TREE_TYPE (exp)))
5754 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5756 /* If the modes of TARGET and TEMP are both BLKmode, both
5757 must be in memory and BITPOS must be aligned on a byte
5758 boundary. If so, we simply do a block copy. */
5759 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5761 gcc_assert (MEM_P (target) && MEM_P (temp)
5762 && !(bitpos % BITS_PER_UNIT));
5764 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5765 emit_block_move (target, temp,
5766 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5773 /* Store the value in the bitfield. */
5774 store_bit_field (target, bitsize, bitpos, mode, temp);
5780 /* Now build a reference to just the desired component. */
5781 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5783 if (to_rtx == target)
5784 to_rtx = copy_rtx (to_rtx);
5786 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5787 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5788 set_mem_alias_set (to_rtx, alias_set);
5790 return store_expr (exp, to_rtx, 0, nontemporal);
5794 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5795 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5796 codes and find the ultimate containing object, which we return.
5798 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5799 bit position, and *PUNSIGNEDP to the signedness of the field.
5800 If the position of the field is variable, we store a tree
5801 giving the variable offset (in units) in *POFFSET.
5802 This offset is in addition to the bit position.
5803 If the position is not variable, we store 0 in *POFFSET.
5805 If any of the extraction expressions is volatile,
5806 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5808 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5809 is a mode that can be used to access the field.  In that case, *PBITSIZE is redundant.
5812 If the field describes a variable-sized object, *PMODE is set to
5813 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5814 this case, but the address of the object can be found.
5816 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5817 look through nodes that serve as markers of a greater alignment than
5818 the one that can be deduced from the expression. These nodes make it
5819 possible for front-ends to prevent temporaries from being created by
5820 the middle-end on alignment considerations. For that purpose, the
5821 normal operating mode at high-level is to always pass FALSE so that
5822 the ultimate containing object is really returned; moreover, the
5823 associated predicate handled_component_p will always return TRUE
5824 on these nodes, thus indicating that they are essentially handled
5825 by get_inner_reference. TRUE should only be passed when the caller
5826 is scanning the expression in order to build another representation
5827 and specifically knows how to handle these nodes; as such, this is
5828 the normal operating mode in the RTL expanders. */
5831 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5832 HOST_WIDE_INT *pbitpos, tree *poffset,
5833 enum machine_mode *pmode, int *punsignedp,
5834 int *pvolatilep, bool keep_aligning)
5837 enum machine_mode mode = VOIDmode;
5838 tree offset = size_zero_node;
5839 tree bit_offset = bitsize_zero_node;
5841 /* First get the mode, signedness, and size. We do this from just the
5842 outermost expression. */
5843 if (TREE_CODE (exp) == COMPONENT_REF)
5845 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5846 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5847 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5849 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5851 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5853 size_tree = TREE_OPERAND (exp, 1);
5854 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5856 /* For vector types, with the correct size of access, use the mode of the inner type.  */
5858 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5859 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5860 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5861 mode = TYPE_MODE (TREE_TYPE (exp));
5865 mode = TYPE_MODE (TREE_TYPE (exp));
5866 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5868 if (mode == BLKmode)
5869 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5871 *pbitsize = GET_MODE_BITSIZE (mode);
5876 if (! host_integerp (size_tree, 1))
5877 mode = BLKmode, *pbitsize = -1;
5879 *pbitsize = tree_low_cst (size_tree, 1);
5884 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5885 and find the ultimate containing object. */
5888 switch (TREE_CODE (exp))
5891 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5892 TREE_OPERAND (exp, 2));
5897 tree field = TREE_OPERAND (exp, 1);
5898 tree this_offset = component_ref_field_offset (exp);
5900 /* If this field hasn't been filled in yet, don't go past it.
5901 This should only happen when folding expressions made during
5902 type construction. */
5903 if (this_offset == 0)
5906 offset = size_binop (PLUS_EXPR, offset, this_offset);
5907 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5908 DECL_FIELD_BIT_OFFSET (field));
5910 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5915 case ARRAY_RANGE_REF:
5917 tree index = TREE_OPERAND (exp, 1);
5918 tree low_bound = array_ref_low_bound (exp);
5919 tree unit_size = array_ref_element_size (exp);
5921 /* We assume all arrays have sizes that are a multiple of a byte.
5922 First subtract the lower bound, if any, in the type of the
5923 index, then convert to sizetype and multiply by the size of
5924 the array element. */
5925 if (! integer_zerop (low_bound))
5926 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5929 offset = size_binop (PLUS_EXPR, offset,
5930 size_binop (MULT_EXPR,
5931 fold_convert (sizetype, index),
5940 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5941 bitsize_int (*pbitsize));
5944 case VIEW_CONVERT_EXPR:
5945 if (keep_aligning && STRICT_ALIGNMENT
5946 && (TYPE_ALIGN (TREE_TYPE (exp))
5947 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5948 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5949 < BIGGEST_ALIGNMENT)
5950 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5951 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5959 /* If any reference in the chain is volatile, the effect is volatile. */
5960 if (TREE_THIS_VOLATILE (exp))
5963 exp = TREE_OPERAND (exp, 0);
5967 /* If OFFSET is constant, see if we can return the whole thing as a
5968 constant bit position.  Make sure to handle overflow during this conversion.  */
5970 if (host_integerp (offset, 0))
5972 double_int tem = double_int_mul (tree_to_double_int (offset),
5973 uhwi_to_double_int (BITS_PER_UNIT));
5974 tem = double_int_add (tem, tree_to_double_int (bit_offset));
5975 if (double_int_fits_in_shwi_p (tem))
5977 *pbitpos = double_int_to_shwi (tem);
5978 *poffset = NULL_TREE;
5983 /* Otherwise, split it up. */
5984 *pbitpos = tree_low_cst (bit_offset, 0);
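/* Worked example (illustrative, assuming a typical ILP32 layout): for

       struct S { int pad; short f[4]; } *p;
       ... p->f[2] ...

   the reference decomposes to the containing object *p with
   *PBITSIZE == 16, *PBITPOS == 64 (byte offset 4 + 2*2, in bits),
   *POFFSET == NULL_TREE and *PMODE == HImode.  */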
5990 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
5991 look for whether EXP or any nested component-refs within EXP is marked as PACKED.  */
5995 contains_packed_reference (const_tree exp)
5997 bool packed_p = false;
6001 switch (TREE_CODE (exp))
6005 tree field = TREE_OPERAND (exp, 1);
6006 packed_p = DECL_PACKED (field)
6007 || TYPE_PACKED (TREE_TYPE (field))
6008 || TYPE_PACKED (TREE_TYPE (exp));
6016 case ARRAY_RANGE_REF:
6019 case VIEW_CONVERT_EXPR:
6025 exp = TREE_OPERAND (exp, 0);
6031 /* Return a tree of sizetype representing the size, in bytes, of the element
6032 of EXP, an ARRAY_REF. */
6035 array_ref_element_size (tree exp)
6037 tree aligned_size = TREE_OPERAND (exp, 3);
6038 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6040 /* If a size was specified in the ARRAY_REF, it's the size measured
6041 in alignment units of the element type. So multiply by that value. */
6044 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6045 sizetype from another type of the same width and signedness. */
6046 if (TREE_TYPE (aligned_size) != sizetype)
6047 aligned_size = fold_convert (sizetype, aligned_size);
6048 return size_binop (MULT_EXPR, aligned_size,
6049 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6052 /* Otherwise, take the size from that of the element type. Substitute
6053 any PLACEHOLDER_EXPR that we have. */
6055 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6058 /* Return a tree representing the lower bound of the array mentioned in
6059 EXP, an ARRAY_REF. */
6062 array_ref_low_bound (tree exp)
6064 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6066 /* If a lower bound is specified in EXP, use it. */
6067 if (TREE_OPERAND (exp, 2))
6068 return TREE_OPERAND (exp, 2);
6070 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6071 substituting for a PLACEHOLDER_EXPR as needed. */
6072 if (domain_type && TYPE_MIN_VALUE (domain_type))
6073 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6075 /* Otherwise, return a zero of the appropriate type. */
6076 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6079 /* Return a tree representing the upper bound of the array mentioned in
6080 EXP, an ARRAY_REF. */
6083 array_ref_up_bound (tree exp)
6085 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6087 /* If there is a domain type and it has an upper bound, use it, substituting
6088 for a PLACEHOLDER_EXPR as needed. */
6089 if (domain_type && TYPE_MAX_VALUE (domain_type))
6090 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6092 /* Otherwise fail. */
6096 /* Return a tree representing the offset, in bytes, of the field referenced
6097 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6100 component_ref_field_offset (tree exp)
6102 tree aligned_offset = TREE_OPERAND (exp, 2);
6103 tree field = TREE_OPERAND (exp, 1);
6105 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6106 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that value.  */
6110 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6111 sizetype from another type of the same width and signedness. */
6112 if (TREE_TYPE (aligned_offset) != sizetype)
6113 aligned_offset = fold_convert (sizetype, aligned_offset);
6114 return size_binop (MULT_EXPR, aligned_offset,
6115 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6118 /* Otherwise, take the offset from that of the field. Substitute
6119 any PLACEHOLDER_EXPR that we have. */
6121 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6124 /* Return 1 if T is an expression that get_inner_reference handles. */
6127 handled_component_p (const_tree t)
6129 switch (TREE_CODE (t))
6134 case ARRAY_RANGE_REF:
6135 case VIEW_CONVERT_EXPR:
6145 /* Given an rtx VALUE that may contain additions and multiplications, return
6146 an equivalent value that just refers to a register, memory, or constant.
6147 This is done by generating instructions to perform the arithmetic and
6148 returning a pseudo-register containing the value.
6150 The returned value may be a REG, SUBREG, MEM or constant. */
6153 force_operand (rtx value, rtx target)
6156 /* Use subtarget as the target for operand 0 of a binary operation. */
6157 rtx subtarget = get_subtarget (target);
6158 enum rtx_code code = GET_CODE (value);
6160 /* Check for subreg applied to an expression produced by the loop optimizer. */
6162 && !REG_P (SUBREG_REG (value))
6163 && !MEM_P (SUBREG_REG (value)))
6166 = simplify_gen_subreg (GET_MODE (value),
6167 force_reg (GET_MODE (SUBREG_REG (value)),
6168 force_operand (SUBREG_REG (value),
6170 GET_MODE (SUBREG_REG (value)),
6171 SUBREG_BYTE (value));
6172 code = GET_CODE (value);
6175 /* Check for a PIC address load. */
6176 if ((code == PLUS || code == MINUS)
6177 && XEXP (value, 0) == pic_offset_table_rtx
6178 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6179 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6180 || GET_CODE (XEXP (value, 1)) == CONST))
6183 subtarget = gen_reg_rtx (GET_MODE (value));
6184 emit_move_insn (subtarget, value);
6188 if (ARITHMETIC_P (value))
6190 op2 = XEXP (value, 1);
6191 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6193 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6196 op2 = negate_rtx (GET_MODE (value), op2);
6199 /* Check for an addition with OP2 a constant integer and our first
6200 operand a PLUS of a virtual register and something else. In that
6201 case, we want to emit the sum of the virtual register and the
6202 constant first and then add the other value. This allows virtual
6203 register instantiation to simply modify the constant rather than
6204 creating another one around this addition. */
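/* Illustrative RTL shape (an assumption for exposition):

       (plus (plus (reg virtual-stack-vars) (reg R)) (const_int 4))

   is emitted as virtual-stack-vars + 4 first, and R is added
   afterwards, so that virtual register instantiation can fold the 4
   into the frame offset instead of needing another constant.  */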
6205 if (code == PLUS && GET_CODE (op2) == CONST_INT
6206 && GET_CODE (XEXP (value, 0)) == PLUS
6207 && REG_P (XEXP (XEXP (value, 0), 0))
6208 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6209 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6211 rtx temp = expand_simple_binop (GET_MODE (value), code,
6212 XEXP (XEXP (value, 0), 0), op2,
6213 subtarget, 0, OPTAB_LIB_WIDEN);
6214 return expand_simple_binop (GET_MODE (value), code, temp,
6215 force_operand (XEXP (XEXP (value,
6217 target, 0, OPTAB_LIB_WIDEN);
6220 op1 = force_operand (XEXP (value, 0), subtarget);
6221 op2 = force_operand (op2, NULL_RTX);
6225 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6227 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6228 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6229 target, 1, OPTAB_LIB_WIDEN);
6231 return expand_divmod (0,
6232 FLOAT_MODE_P (GET_MODE (value))
6233 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6234 GET_MODE (value), op1, op2, target, 0);
6236 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6239 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6242 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6245 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6246 target, 0, OPTAB_LIB_WIDEN);
6248 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6249 target, 1, OPTAB_LIB_WIDEN);
6252 if (UNARY_P (value))
6255 target = gen_reg_rtx (GET_MODE (value));
6256 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6263 case FLOAT_TRUNCATE:
6264 convert_move (target, op1, code == ZERO_EXTEND);
6269 expand_fix (target, op1, code == UNSIGNED_FIX);
6273 case UNSIGNED_FLOAT:
6274 expand_float (target, op1, code == UNSIGNED_FLOAT);
6278 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6282 #ifdef INSN_SCHEDULING
6283 /* On machines that have insn scheduling, we want all memory reference to be
6284 explicit, so we need to deal with such paradoxical SUBREGs. */
6285 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6286 && (GET_MODE_SIZE (GET_MODE (value))
6287 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6289 = simplify_gen_subreg (GET_MODE (value),
6290 force_reg (GET_MODE (SUBREG_REG (value)),
6291 force_operand (SUBREG_REG (value),
6293 GET_MODE (SUBREG_REG (value)),
6294 SUBREG_BYTE (value));
6300 /* Subroutine of expand_expr: return nonzero iff there is no way that
6301 EXP can reference X, which is being modified. TOP_P is nonzero if this
6302 call is going to be used to determine whether we need a temporary
6303 for EXP, as opposed to a recursive call to this function.
6305 It is always safe for this routine to return zero since it merely
6306 searches for optimization opportunities. */
6309 safe_from_p (const_rtx x, tree exp, int top_p)
6315 /* If EXP has varying size, we MUST use a target since we currently
6316 have no way of allocating temporaries of variable size
6317 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6318 So we assume here that something at a higher level has prevented a
6319 clash. This is somewhat bogus, but the best we can do. Only
6320 do this when X is BLKmode and when we are at the top level. */
6321 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6322 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6323 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6324 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6325 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6327 && GET_MODE (x) == BLKmode)
6328 /* If X is in the outgoing argument area, it is always safe. */
6330 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6331 || (GET_CODE (XEXP (x, 0)) == PLUS
6332 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6335 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6336 find the underlying pseudo. */
6337 if (GET_CODE (x) == SUBREG)
6340 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6344 /* Now look at our tree code and possibly recurse. */
6345 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6347 case tcc_declaration:
6348 exp_rtl = DECL_RTL_IF_SET (exp);
6354 case tcc_exceptional:
6355 if (TREE_CODE (exp) == TREE_LIST)
6359 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6361 exp = TREE_CHAIN (exp);
6364 if (TREE_CODE (exp) != TREE_LIST)
6365 return safe_from_p (x, exp, 0);
6368 else if (TREE_CODE (exp) == CONSTRUCTOR)
6370 constructor_elt *ce;
6371 unsigned HOST_WIDE_INT idx;
6374 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6376 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6377 || !safe_from_p (x, ce->value, 0))
6381 else if (TREE_CODE (exp) == ERROR_MARK)
6382 return 1; /* An already-visited SAVE_EXPR? */
6387 /* The only case we look at here is the DECL_INITIAL inside a
6389 return (TREE_CODE (exp) != DECL_EXPR
6390 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6391 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6392 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6395 case tcc_comparison:
6396 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6401 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6403 case tcc_expression:
6406 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6407 the expression. If it is set, we conflict iff we are that rtx or
6408 both are in memory. Otherwise, we check all operands of the
6409 expression recursively. */
6411 switch (TREE_CODE (exp))
6414 /* If the operand is static or we are static, we can't conflict.
6415 Likewise if we don't conflict with the operand at all. */
6416 if (staticp (TREE_OPERAND (exp, 0))
6417 || TREE_STATIC (exp)
6418 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6421 /* Otherwise, the only way this can conflict is if we are taking
6422 the address of a DECL a that address if part of X, which is
6424 exp = TREE_OPERAND (exp, 0);
6427 if (!DECL_RTL_SET_P (exp)
6428 || !MEM_P (DECL_RTL (exp)))
6431 exp_rtl = XEXP (DECL_RTL (exp), 0);
6435 case MISALIGNED_INDIRECT_REF:
6436 case ALIGN_INDIRECT_REF:
6439 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6440 get_alias_set (exp)))
6445 /* Assume that the call will clobber all hard registers and
6447 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6452 case WITH_CLEANUP_EXPR:
6453 case CLEANUP_POINT_EXPR:
6454 /* Lowered by gimplify.c. */
6458 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6464 /* If we have an rtx, we do not need to scan our operands. */
6468 nops = TREE_OPERAND_LENGTH (exp);
6469 for (i = 0; i < nops; i++)
6470 if (TREE_OPERAND (exp, i) != 0
6471 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6477 /* Should never get a type here. */
6480 case tcc_gimple_stmt:
6484 /* If we have an rtl, find any enclosed object. Then see if we conflict
6488 if (GET_CODE (exp_rtl) == SUBREG)
6490 exp_rtl = SUBREG_REG (exp_rtl);
6492 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6496 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6497 are memory and they conflict. */
6498 return ! (rtx_equal_p (x, exp_rtl)
6499 || (MEM_P (x) && MEM_P (exp_rtl)
6500 && true_dependence (exp_rtl, VOIDmode, x,
6501 rtx_addr_varies_p)));
6504 /* If we reach here, it is safe. */
6509 /* Return the highest power of two that EXP is known to be a multiple of.
6510 This is used in updating alignment of MEMs in array references. */
6512 unsigned HOST_WIDE_INT
6513 highest_pow2_factor (const_tree exp)
6515 unsigned HOST_WIDE_INT c0, c1;
6517 switch (TREE_CODE (exp))
6520 /* We can find the lowest bit that's a one. If the low
6521 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6522 We need to handle this case since we can find it in a COND_EXPR,
6523 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6524 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6526 if (TREE_OVERFLOW (exp))
6527 return BIGGEST_ALIGNMENT;
6530 /* Note: tree_low_cst is intentionally not used here,
6531 we don't care about the upper bits. */
6532 c0 = TREE_INT_CST_LOW (exp);
6534 return c0 ? c0 : BIGGEST_ALIGNMENT;
6538 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6539 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6540 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6541 return MIN (c0, c1);
6544 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6545 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6548 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6550 if (integer_pow2p (TREE_OPERAND (exp, 1))
6551 && host_integerp (TREE_OPERAND (exp, 1), 1))
6553 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6554 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6555 return MAX (1, c0 / c1);
6559 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6561 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6564 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6567 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6568 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6569 return MIN (c0, c1);
6578 /* Similar, except that the alignment requirements of TARGET are
6579 taken into account. Assume it is at least as aligned as its
6580 type, unless it is a COMPONENT_REF in which case the layout of
6581 the structure gives the alignment. */
6583 static unsigned HOST_WIDE_INT
6584 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6586 unsigned HOST_WIDE_INT target_align, factor;
6588 factor = highest_pow2_factor (exp);
6589 if (TREE_CODE (target) == COMPONENT_REF)
6590 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6592 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6593 return MAX (factor, target_align);
6596 /* Return &VAR expression for emulated thread local VAR. */
6599 emutls_var_address (tree var)
6601 tree emuvar = emutls_decl (var);
6602 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6603 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6604 tree arglist = build_tree_list (NULL_TREE, arg);
6605 tree call = build_function_call_expr (fn, arglist);
6606 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6609 /* Expands variable VAR. */
6612 expand_var (tree var)
6614 if (DECL_EXTERNAL (var))
6617 if (TREE_STATIC (var))
6618 /* If this is an inlined copy of a static local variable,
6619 look up the original decl. */
6620 var = DECL_ORIGIN (var);
6622 if (TREE_STATIC (var)
6623 ? !TREE_ASM_WRITTEN (var)
6624 : !DECL_RTL_SET_P (var))
6626 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6627 /* Should be ignored. */;
6628 else if (lang_hooks.expand_decl (var))
6630 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6632 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6633 rest_of_decl_compilation (var, 0, 0);
6635 /* No expansion needed. */
6636 gcc_assert (TREE_CODE (var) == TYPE_DECL
6637 || TREE_CODE (var) == CONST_DECL
6638 || TREE_CODE (var) == FUNCTION_DECL
6639 || TREE_CODE (var) == LABEL_DECL);
6643 /* Subroutine of expand_expr. Expand the two operands of a binary
6644 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6645 The value may be stored in TARGET if TARGET is nonzero. The
6646 MODIFIER argument is as documented by expand_expr. */
6649 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6650 enum expand_modifier modifier)
6652 if (! safe_from_p (target, exp1, 1))
6654 if (operand_equal_p (exp0, exp1, 0))
6656 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6657 *op1 = copy_rtx (*op0);
6661 /* If we need to preserve evaluation order, copy exp0 into its own
6662 temporary variable so that it can't be clobbered by exp1. */
6663 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6664 exp0 = save_expr (exp0);
6665 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6666 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6671 /* Return a MEM that contains constant EXP. DEFER is as for
6672 output_constant_def and MODIFIER is as for expand_expr. */
6675 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6679 mem = output_constant_def (exp, defer);
6680 if (modifier != EXPAND_INITIALIZER)
6681 mem = use_anchored_address (mem);
6685 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6686 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6689 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6690 enum expand_modifier modifier)
6692 rtx result, subtarget;
6694 HOST_WIDE_INT bitsize, bitpos;
6695 int volatilep, unsignedp;
6696 enum machine_mode mode1;
6698 /* If we are taking the address of a constant and are at the top level,
6699 we have to use output_constant_def since we can't call force_const_mem
6701 /* ??? This should be considered a front-end bug. We should not be
6702 generating ADDR_EXPR of something that isn't an LVALUE. The only
6703 exception here is STRING_CST. */
6704 if (TREE_CODE (exp) == CONSTRUCTOR
6705 || CONSTANT_CLASS_P (exp))
6706 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6708 /* Everything must be something allowed by is_gimple_addressable. */
6709 switch (TREE_CODE (exp))
6712 /* This case will happen via recursion for &a->b. */
6713 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6716 /* Recurse and make the output_constant_def clause above handle this. */
6717 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6721 /* The real part of the complex number is always first, therefore
6722 the address is the same as the address of the parent object. */
6725 inner = TREE_OPERAND (exp, 0);
6729 /* The imaginary part of the complex number is always second.
6730 The expression is therefore always offset by the size of the
6733 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6734 inner = TREE_OPERAND (exp, 0);
6738 /* TLS emulation hook - replace __thread VAR's &VAR with
6739 __emutls_get_address (&_emutls.VAR). */
6740 if (! targetm.have_tls
6741 && TREE_CODE (exp) == VAR_DECL
6742 && DECL_THREAD_LOCAL_P (exp))
6744 exp = emutls_var_address (exp);
6745 return expand_expr (exp, target, tmode, modifier);
6750 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6751 expand_expr, as that can have various side effects; LABEL_DECLs for
6752 example, may not have their DECL_RTL set yet. Assume language
6753 specific tree nodes can be expanded in some interesting way. */
6755 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6757 result = expand_expr (exp, target, tmode,
6758 modifier == EXPAND_INITIALIZER
6759 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6761 /* If the DECL isn't in memory, then the DECL wasn't properly
6762 marked TREE_ADDRESSABLE, which will be either a front-end
6763 or a tree optimizer bug. */
6764 gcc_assert (MEM_P (result));
6765 result = XEXP (result, 0);
6767 /* ??? Is this needed anymore? */
6768 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6770 assemble_external (exp);
6771 TREE_USED (exp) = 1;
6774 if (modifier != EXPAND_INITIALIZER
6775 && modifier != EXPAND_CONST_ADDRESS)
6776 result = force_operand (result, target);
6780 /* Pass FALSE as the last argument to get_inner_reference although
6781 we are expanding to RTL. The rationale is that we know how to
6782 handle "aligning nodes" here: we can just bypass them because
6783 they won't change the final object whose address will be returned
6784 (they actually exist only for that purpose). */
6785 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6786 &mode1, &unsignedp, &volatilep, false);
6790 /* We must have made progress. */
6791 gcc_assert (inner != exp);
6793 subtarget = offset || bitpos ? NULL_RTX : target;
6794 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6800 if (modifier != EXPAND_NORMAL)
6801 result = force_operand (result, NULL);
6802 tmp = expand_expr (offset, NULL_RTX, tmode,
6803 modifier == EXPAND_INITIALIZER
6804 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6806 result = convert_memory_address (tmode, result);
6807 tmp = convert_memory_address (tmode, tmp);
6809 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6810 result = gen_rtx_PLUS (tmode, result, tmp);
6813 subtarget = bitpos ? NULL_RTX : target;
6814 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6815 1, OPTAB_LIB_WIDEN);
6821 /* Someone beforehand should have rejected taking the address
6822 of such an object. */
6823 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6825 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6826 if (modifier < EXPAND_SUM)
6827 result = force_operand (result, target);
6833 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6834 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6837 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6838 enum expand_modifier modifier)
6840 enum machine_mode rmode;
6843 /* Target mode of VOIDmode says "whatever's natural". */
6844 if (tmode == VOIDmode)
6845 tmode = TYPE_MODE (TREE_TYPE (exp));
6847 /* We can get called with some Weird Things if the user does silliness
6848 like "(short) &a". In that case, convert_memory_address won't do
6849 the right thing, so ignore the given target mode. */
6850 if (tmode != Pmode && tmode != ptr_mode)
6853 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6856 /* Despite expand_expr claims concerning ignoring TMODE when not
6857 strictly convenient, stuff breaks if we don't honor it. Note
6858 that combined with the above, we only do this for pointer modes. */
6859 rmode = GET_MODE (result);
6860 if (rmode == VOIDmode)
6863 result = convert_memory_address (tmode, result);
6869 /* expand_expr: generate code for computing expression EXP.
6870 An rtx for the computed value is returned. The value is never null.
6871 In the case of a void EXP, const0_rtx is returned.
6873 The value may be stored in TARGET if TARGET is nonzero.
6874 TARGET is just a suggestion; callers must assume that
6875 the rtx returned may not be the same as TARGET.
6877 If TARGET is CONST0_RTX, it means that the value will be ignored.
6879 If TMODE is not VOIDmode, it suggests generating the
6880 result in mode TMODE. But this is done only when convenient.
6881 Otherwise, TMODE is ignored and the value generated in its natural mode.
6882 TMODE is just a suggestion; callers must assume that
6883 the rtx returned may not have mode TMODE.
6885 Note that TARGET may have neither TMODE nor MODE. In that case, it
6886 probably will not be used.
6888 If MODIFIER is EXPAND_SUM then when EXP is an addition
6889 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6890 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6891 products as above, or REG or MEM, or constant.
6892 Ordinarily in such cases we would output mul or add instructions
6893 and then return a pseudo reg containing the sum.
6895 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6896 it also marks a label as absolutely required (it can't be dead).
6897 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6898 This is used for outputting expressions used in initializers.
6900 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6901 with a constant address even if that address is not normally legitimate.
6902 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6904 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6905 a call parameter. Such targets require special care as we haven't yet
6906 marked TARGET so that it's safe from being trashed by libcalls. We
6907 don't want to use TARGET for anything but the final result;
6908 Intermediate values must go elsewhere. Additionally, calls to
6909 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6911 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6912 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6913 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6914 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6917 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6918 enum expand_modifier, rtx *);
6921 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6922 enum expand_modifier modifier, rtx *alt_rtl)
6925 rtx ret, last = NULL;
6927 /* Handle ERROR_MARK before anybody tries to access its type. */
6928 if (TREE_CODE (exp) == ERROR_MARK
6929 || (!GIMPLE_TUPLE_P (exp) && TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
6931 ret = CONST0_RTX (tmode);
6932 return ret ? ret : const0_rtx;
6935 if (flag_non_call_exceptions)
6937 rn = lookup_stmt_eh_region (exp);
6938 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6940 last = get_last_insn ();
6943 /* If this is an expression of some kind and it has an associated line
6944 number, then emit the line number before expanding the expression.
6946 We need to save and restore the file and line information so that
6947 errors discovered during expansion are emitted with the right
6948 information. It would be better of the diagnostic routines
6949 used the file/line information embedded in the tree nodes rather
6951 if (cfun && EXPR_HAS_LOCATION (exp))
6953 location_t saved_location = input_location;
6954 input_location = EXPR_LOCATION (exp);
6955 set_curr_insn_source_location (input_location);
6957 /* Record where the insns produced belong. */
6958 set_curr_insn_block (TREE_BLOCK (exp));
6960 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6962 input_location = saved_location;
6966 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6969 /* If using non-call exceptions, mark all insns that may trap.
6970 expand_call() will mark CALL_INSNs before we get to this code,
6971 but it doesn't handle libcalls, and these may trap. */
6975 for (insn = next_real_insn (last); insn;
6976 insn = next_real_insn (insn))
6978 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6979 /* If we want exceptions for non-call insns, any
6980 may_trap_p instruction may throw. */
6981 && GET_CODE (PATTERN (insn)) != CLOBBER
6982 && GET_CODE (PATTERN (insn)) != USE
6983 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6985 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6995 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6996 enum expand_modifier modifier, rtx *alt_rtl)
6998 rtx op0, op1, op2, temp, decl_rtl;
7001 enum machine_mode mode;
7002 enum tree_code code = TREE_CODE (exp);
7004 rtx subtarget, original_target;
7006 tree context, subexp0, subexp1;
7007 bool reduce_bit_field = false;
7008 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
7009 ? reduce_to_bit_field_precision ((expr), \
7014 if (GIMPLE_STMT_P (exp))
7016 type = void_type_node;
7022 type = TREE_TYPE (exp);
7023 mode = TYPE_MODE (type);
7024 unsignedp = TYPE_UNSIGNED (type);
7026 if (lang_hooks.reduce_bit_field_operations
7027 && TREE_CODE (type) == INTEGER_TYPE
7028 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
7030 /* An operation in what may be a bit-field type needs the
7031 result to be reduced to the precision of the bit-field type,
7032 which is narrower than that of the type's mode. */
7033 reduce_bit_field = true;
7034 if (modifier == EXPAND_STACK_PARM)
7038 /* Use subtarget as the target for operand 0 of a binary operation. */
7039 subtarget = get_subtarget (target);
7040 original_target = target;
7041 ignore = (target == const0_rtx
7042 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
7043 || code == CONVERT_EXPR || code == COND_EXPR
7044 || code == VIEW_CONVERT_EXPR)
7045 && TREE_CODE (type) == VOID_TYPE));
7047 /* If we are going to ignore this result, we need only do something
7048 if there is a side-effect somewhere in the expression. If there
7049 is, short-circuit the most common cases here. Note that we must
7050 not call expand_expr with anything but const0_rtx in case this
7051 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7055 if (! TREE_SIDE_EFFECTS (exp))
7058 /* Ensure we reference a volatile object even if value is ignored, but
7059 don't do this if all we are doing is taking its address. */
7060 if (TREE_THIS_VOLATILE (exp)
7061 && TREE_CODE (exp) != FUNCTION_DECL
7062 && mode != VOIDmode && mode != BLKmode
7063 && modifier != EXPAND_CONST_ADDRESS)
7065 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7067 temp = copy_to_reg (temp);
7071 if (TREE_CODE_CLASS (code) == tcc_unary
7072 || code == COMPONENT_REF || code == INDIRECT_REF)
7073 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7076 else if (TREE_CODE_CLASS (code) == tcc_binary
7077 || TREE_CODE_CLASS (code) == tcc_comparison
7078 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7080 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7081 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7084 else if (code == BIT_FIELD_REF)
7086 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7087 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7088 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7100 tree function = decl_function_context (exp);
7102 temp = label_rtx (exp);
7103 temp = gen_rtx_LABEL_REF (Pmode, temp);
7105 if (function != current_function_decl
7107 LABEL_REF_NONLOCAL_P (temp) = 1;
7109 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7114 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7119 /* If a static var's type was incomplete when the decl was written,
7120 but the type is complete now, lay out the decl now. */
7121 if (DECL_SIZE (exp) == 0
7122 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7123 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7124 layout_decl (exp, 0);
7126 /* TLS emulation hook - replace __thread vars with
7127 *__emutls_get_address (&_emutls.var). */
7128 if (! targetm.have_tls
7129 && TREE_CODE (exp) == VAR_DECL
7130 && DECL_THREAD_LOCAL_P (exp))
7132 exp = build_fold_indirect_ref (emutls_var_address (exp));
7133 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7136 /* ... fall through ... */
7140 decl_rtl = DECL_RTL (exp);
7141 gcc_assert (decl_rtl);
7142 decl_rtl = copy_rtx (decl_rtl);
7144 /* Ensure variable marked as used even if it doesn't go through
7145 a parser. If it hasn't be used yet, write out an external
7147 if (! TREE_USED (exp))
7149 assemble_external (exp);
7150 TREE_USED (exp) = 1;
7153 /* Show we haven't gotten RTL for this yet. */
7156 /* Variables inherited from containing functions should have
7157 been lowered by this point. */
7158 context = decl_function_context (exp);
7159 gcc_assert (!context
7160 || context == current_function_decl
7161 || TREE_STATIC (exp)
7162 /* ??? C++ creates functions that are not TREE_STATIC. */
7163 || TREE_CODE (exp) == FUNCTION_DECL);
7165 /* This is the case of an array whose size is to be determined
7166 from its initializer, while the initializer is still being parsed.
7169 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7170 temp = validize_mem (decl_rtl);
7172 /* If DECL_RTL is memory, we are in the normal case and either
7173 the address is not valid or it is not a register and -fforce-addr
7174 is specified, get the address into a register. */
7176 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7179 *alt_rtl = decl_rtl;
7180 decl_rtl = use_anchored_address (decl_rtl);
7181 if (modifier != EXPAND_CONST_ADDRESS
7182 && modifier != EXPAND_SUM
7183 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
7184 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
7185 temp = replace_equiv_address (decl_rtl,
7186 copy_rtx (XEXP (decl_rtl, 0)));
7189 /* If we got something, return it. But first, set the alignment
7190 if the address is a register. */
7193 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7194 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7199 /* If the mode of DECL_RTL does not match that of the decl, it
7200 must be a promoted value. We return a SUBREG of the wanted mode,
7201 but mark it so that we know that it was already extended. */
7203 if (REG_P (decl_rtl)
7204 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7206 enum machine_mode pmode;
7208 /* Get the signedness used for this variable. Ensure we get the
7209 same mode we got when the variable was declared. */
7210 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7211 (TREE_CODE (exp) == RESULT_DECL
7212 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7213 gcc_assert (GET_MODE (decl_rtl) == pmode);
7215 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7216 SUBREG_PROMOTED_VAR_P (temp) = 1;
7217 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7224 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7225 TREE_INT_CST_HIGH (exp), mode);
7231 tree tmp = NULL_TREE;
7232 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7233 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
7234 return const_vector_from_tree (exp);
7235 if (GET_MODE_CLASS (mode) == MODE_INT)
7237 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7239 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7242 tmp = build_constructor_from_list (type,
7243 TREE_VECTOR_CST_ELTS (exp));
7244 return expand_expr (tmp, ignore ? const0_rtx : target,
7249 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7252 /* If optimized, generate immediate CONST_DOUBLE
7253 which will be turned into memory by reload if necessary.
7255 We used to force a register so that loop.c could see it. But
7256 this does not allow gen_* patterns to perform optimizations with
7257 the constants. It also produces two insns in cases like "x = 1.0;".
7258 On most machines, floating-point constants are not permitted in
7259 many insns, so we'd end up copying it to a register in any case.
7261 Now, we do the copying in expand_binop, if appropriate. */
7262 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7263 TYPE_MODE (TREE_TYPE (exp)));
7266 /* Handle evaluating a complex constant in a CONCAT target. */
7267 if (original_target && GET_CODE (original_target) == CONCAT)
7269 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7272 rtarg = XEXP (original_target, 0);
7273 itarg = XEXP (original_target, 1);
7275 /* Move the real and imaginary parts separately. */
7276 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7277 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7280 emit_move_insn (rtarg, op0);
7282 emit_move_insn (itarg, op1);
7284 return original_target;
7287 /* ... fall through ... */
7290 temp = expand_expr_constant (exp, 1, modifier);
7292 /* temp contains a constant address.
7293 On RISC machines where a constant address isn't valid,
7294 make some insns to get that address into a register. */
7295 if (modifier != EXPAND_CONST_ADDRESS
7296 && modifier != EXPAND_INITIALIZER
7297 && modifier != EXPAND_SUM
7298 && (! memory_address_p (mode, XEXP (temp, 0))
7299 || flag_force_addr))
7300 return replace_equiv_address (temp,
7301 copy_rtx (XEXP (temp, 0)));
7306 tree val = TREE_OPERAND (exp, 0);
7307 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7309 if (!SAVE_EXPR_RESOLVED_P (exp))
7311 /* We can indeed still hit this case, typically via builtin
7312 expanders calling save_expr immediately before expanding
7313 something. Assume this means that we only have to deal
7314 with non-BLKmode values. */
7315 gcc_assert (GET_MODE (ret) != BLKmode);
7317 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7318 DECL_ARTIFICIAL (val) = 1;
7319 DECL_IGNORED_P (val) = 1;
7320 TREE_OPERAND (exp, 0) = val;
7321 SAVE_EXPR_RESOLVED_P (exp) = 1;
7323 if (!CONSTANT_P (ret))
7324 ret = copy_to_reg (ret);
7325 SET_DECL_RTL (val, ret);
7332 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7333 expand_goto (TREE_OPERAND (exp, 0));
7335 expand_computed_goto (TREE_OPERAND (exp, 0));
7339 /* If we don't need the result, just ensure we evaluate any
7343 unsigned HOST_WIDE_INT idx;
7346 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7347 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7352 /* Try to avoid creating a temporary at all. This is possible
7353 if all of the initializer is zero.
7354 FIXME: try to handle all [0..255] initializers we can handle
7356 else if (TREE_STATIC (exp)
7357 && !TREE_ADDRESSABLE (exp)
7358 && target != 0 && mode == BLKmode
7359 && all_zeros_p (exp))
7361 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7365 /* All elts simple constants => refer to a constant in memory. But
7366 if this is a non-BLKmode mode, let it store a field at a time
7367 since that should make a CONST_INT or CONST_DOUBLE when we
7368 fold. Likewise, if we have a target we can use, it is best to
7369 store directly into the target unless the type is large enough
7370 that memcpy will be used. If we are making an initializer and
7371 all operands are constant, put it in memory as well.
7373 FIXME: Avoid trying to fill vector constructors piece-meal.
7374 Output them with output_constant_def below unless we're sure
7375 they're zeros. This should go away when vector initializers
7376 are treated like VECTOR_CST instead of arrays.
7378 else if ((TREE_STATIC (exp)
7379 && ((mode == BLKmode
7380 && ! (target != 0 && safe_from_p (target, exp, 1)))
7381 || TREE_ADDRESSABLE (exp)
7382 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7383 && (! MOVE_BY_PIECES_P
7384 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7386 && ! mostly_zeros_p (exp))))
7387 || ((modifier == EXPAND_INITIALIZER
7388 || modifier == EXPAND_CONST_ADDRESS)
7389 && TREE_CONSTANT (exp)))
7391 rtx constructor = expand_expr_constant (exp, 1, modifier);
7393 if (modifier != EXPAND_CONST_ADDRESS
7394 && modifier != EXPAND_INITIALIZER
7395 && modifier != EXPAND_SUM)
7396 constructor = validize_mem (constructor);
7402 /* Handle calls that pass values in multiple non-contiguous
7403 locations. The Irix 6 ABI has examples of this. */
7404 if (target == 0 || ! safe_from_p (target, exp, 1)
7405 || GET_CODE (target) == PARALLEL
7406 || modifier == EXPAND_STACK_PARM)
7408 = assign_temp (build_qualified_type (type,
7410 | (TREE_READONLY (exp)
7411 * TYPE_QUAL_CONST))),
7412 0, TREE_ADDRESSABLE (exp), 1);
7414 store_constructor (exp, target, 0, int_expr_size (exp));
7418 case MISALIGNED_INDIRECT_REF:
7419 case ALIGN_INDIRECT_REF:
7422 tree exp1 = TREE_OPERAND (exp, 0);
7424 if (modifier != EXPAND_WRITE)
7428 t = fold_read_from_constant_string (exp);
7430 return expand_expr (t, target, tmode, modifier);
7433 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7434 op0 = memory_address (mode, op0);
7436 if (code == ALIGN_INDIRECT_REF)
7438 int align = TYPE_ALIGN_UNIT (type);
7439 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7440 op0 = memory_address (mode, op0);
7443 temp = gen_rtx_MEM (mode, op0);
7445 set_mem_attributes (temp, exp, 0);
7447 /* Resolve the misalignment now, so that we don't have to remember
7448 to resolve it later. Of course, this only works for reads. */
7449 /* ??? When we get around to supporting writes, we'll have to handle
7450 this in store_expr directly. The vectorizer isn't generating
7451 those yet, however. */
7452 if (code == MISALIGNED_INDIRECT_REF)
7457 gcc_assert (modifier == EXPAND_NORMAL
7458 || modifier == EXPAND_STACK_PARM);
7460 /* The vectorizer should have already checked the mode. */
7461 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7462 gcc_assert (icode != CODE_FOR_nothing);
7464 /* We've already validated the memory, and we're creating a
7465 new pseudo destination. The predicates really can't fail. */
7466 reg = gen_reg_rtx (mode);
7468 /* Nor can the insn generator. */
7469 insn = GEN_FCN (icode) (reg, temp);
7478 case TARGET_MEM_REF:
7480 struct mem_address addr;
7482 get_address_description (exp, &addr);
7483 op0 = addr_for_mem_ref (&addr, true);
7484 op0 = memory_address (mode, op0);
7485 temp = gen_rtx_MEM (mode, op0);
7486 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7493 tree array = TREE_OPERAND (exp, 0);
7494 tree index = TREE_OPERAND (exp, 1);
7496 /* Fold an expression like: "foo"[2].
7497 This is not done in fold so it won't happen inside &.
7498 Don't fold if this is for wide characters since it's too
7499 difficult to do correctly and this is a very rare case. */
7501 if (modifier != EXPAND_CONST_ADDRESS
7502 && modifier != EXPAND_INITIALIZER
7503 && modifier != EXPAND_MEMORY)
7505 tree t = fold_read_from_constant_string (exp);
7508 return expand_expr (t, target, tmode, modifier);
7511 /* If this is a constant index into a constant array,
7512 just get the value from the array. Handle both the cases when
7513 we have an explicit constructor and when our operand is a variable
7514 that was declared const. */
7516 if (modifier != EXPAND_CONST_ADDRESS
7517 && modifier != EXPAND_INITIALIZER
7518 && modifier != EXPAND_MEMORY
7519 && TREE_CODE (array) == CONSTRUCTOR
7520 && ! TREE_SIDE_EFFECTS (array)
7521 && TREE_CODE (index) == INTEGER_CST)
7523 unsigned HOST_WIDE_INT ix;
7526 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7528 if (tree_int_cst_equal (field, index))
7530 if (!TREE_SIDE_EFFECTS (value))
7531 return expand_expr (fold (value), target, tmode, modifier);
7536 else if (optimize >= 1
7537 && modifier != EXPAND_CONST_ADDRESS
7538 && modifier != EXPAND_INITIALIZER
7539 && modifier != EXPAND_MEMORY
7540 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7541 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7542 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7543 && targetm.binds_local_p (array))
7545 if (TREE_CODE (index) == INTEGER_CST)
7547 tree init = DECL_INITIAL (array);
7549 if (TREE_CODE (init) == CONSTRUCTOR)
7551 unsigned HOST_WIDE_INT ix;
7554 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7556 if (tree_int_cst_equal (field, index))
7558 if (!TREE_SIDE_EFFECTS (value))
7559 return expand_expr (fold (value), target, tmode,
7564 else if(TREE_CODE (init) == STRING_CST)
7566 tree index1 = index;
7567 tree low_bound = array_ref_low_bound (exp);
7568 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7570 /* Optimize the special-case of a zero lower bound.
7572 We convert the low_bound to sizetype to avoid some problems
7573 with constant folding. (E.g. suppose the lower bound is 1,
7574 and its mode is QI. Without the conversion,l (ARRAY
7575 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7576 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
7578 if (! integer_zerop (low_bound))
7579 index1 = size_diffop (index1, fold_convert (sizetype,
7582 if (0 > compare_tree_int (index1,
7583 TREE_STRING_LENGTH (init)))
7585 tree type = TREE_TYPE (TREE_TYPE (init));
7586 enum machine_mode mode = TYPE_MODE (type);
7588 if (GET_MODE_CLASS (mode) == MODE_INT
7589 && GET_MODE_SIZE (mode) == 1)
7590 return gen_int_mode (TREE_STRING_POINTER (init)
7591 [TREE_INT_CST_LOW (index1)],
7598 goto normal_inner_ref;
7601 /* If the operand is a CONSTRUCTOR, we can just extract the
7602 appropriate field if it is present. */
7603 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7605 unsigned HOST_WIDE_INT idx;
7608 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7610 if (field == TREE_OPERAND (exp, 1)
7611 /* We can normally use the value of the field in the
7612 CONSTRUCTOR. However, if this is a bitfield in
7613 an integral mode that we can fit in a HOST_WIDE_INT,
7614 we must mask only the number of bits in the bitfield,
7615 since this is done implicitly by the constructor. If
7616 the bitfield does not meet either of those conditions,
7617 we can't do this optimization. */
7618 && (! DECL_BIT_FIELD (field)
7619 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7620 && (GET_MODE_BITSIZE (DECL_MODE (field))
7621 <= HOST_BITS_PER_WIDE_INT))))
7623 if (DECL_BIT_FIELD (field)
7624 && modifier == EXPAND_STACK_PARM)
7626 op0 = expand_expr (value, target, tmode, modifier);
7627 if (DECL_BIT_FIELD (field))
7629 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7630 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7632 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7634 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7635 op0 = expand_and (imode, op0, op1, target);
7640 = build_int_cst (NULL_TREE,
7641 GET_MODE_BITSIZE (imode) - bitsize);
7643 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7645 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7653 goto normal_inner_ref;
7656 case ARRAY_RANGE_REF:
7659 enum machine_mode mode1;
7660 HOST_WIDE_INT bitsize, bitpos;
7663 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7664 &mode1, &unsignedp, &volatilep, true);
7667 /* If we got back the original object, something is wrong. Perhaps
7668 we are evaluating an expression too early. In any event, don't
7669 infinitely recurse. */
7670 gcc_assert (tem != exp);
7672 /* If TEM's type is a union of variable size, pass TARGET to the inner
7673 computation, since it will need a temporary and TARGET is known
7674 to have to do. This occurs in unchecked conversion in Ada. */
7678 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7679 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7681 && modifier != EXPAND_STACK_PARM
7682 ? target : NULL_RTX),
7684 (modifier == EXPAND_INITIALIZER
7685 || modifier == EXPAND_CONST_ADDRESS
7686 || modifier == EXPAND_STACK_PARM)
7687 ? modifier : EXPAND_NORMAL);
7689 /* If this is a constant, put it into a register if it is a legitimate
7690 constant, OFFSET is 0, and we won't try to extract outside the
7691 register (in case we were passed a partially uninitialized object
7692 or a view_conversion to a larger size). Force the constant to
7693 memory otherwise. */
7694 if (CONSTANT_P (op0))
7696 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7697 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7699 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7700 op0 = force_reg (mode, op0);
7702 op0 = validize_mem (force_const_mem (mode, op0));
7705 /* Otherwise, if this object not in memory and we either have an
7706 offset, a BLKmode result, or a reference outside the object, put it
7707 there. Such cases can occur in Ada if we have unchecked conversion
7708 of an expression from a scalar type to an array or record type or
7709 for an ARRAY_RANGE_REF whose type is BLKmode. */
7710 else if (!MEM_P (op0)
7712 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7713 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7715 tree nt = build_qualified_type (TREE_TYPE (tem),
7716 (TYPE_QUALS (TREE_TYPE (tem))
7717 | TYPE_QUAL_CONST));
7718 rtx memloc = assign_temp (nt, 1, 1, 1);
7720 emit_move_insn (memloc, op0);
7726 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7729 gcc_assert (MEM_P (op0));
7731 #ifdef POINTERS_EXTEND_UNSIGNED
7732 if (GET_MODE (offset_rtx) != Pmode)
7733 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7735 if (GET_MODE (offset_rtx) != ptr_mode)
7736 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7739 if (GET_MODE (op0) == BLKmode
7740 /* A constant address in OP0 can have VOIDmode, we must
7741 not try to call force_reg in that case. */
7742 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7744 && (bitpos % bitsize) == 0
7745 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7746 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7748 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7752 op0 = offset_address (op0, offset_rtx,
7753 highest_pow2_factor (offset));
7756 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7757 record its alignment as BIGGEST_ALIGNMENT. */
7758 if (MEM_P (op0) && bitpos == 0 && offset != 0
7759 && is_aligning_offset (offset, tem))
7760 set_mem_align (op0, BIGGEST_ALIGNMENT);
7762 /* Don't forget about volatility even if this is a bitfield. */
7763 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7765 if (op0 == orig_op0)
7766 op0 = copy_rtx (op0);
7768 MEM_VOLATILE_P (op0) = 1;
7771 /* The following code doesn't handle CONCAT.
7772 Assume only bitpos == 0 can be used for CONCAT, due to
7773 one element arrays having the same mode as its element. */
7774 if (GET_CODE (op0) == CONCAT)
7776 gcc_assert (bitpos == 0
7777 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7781 /* In cases where an aligned union has an unaligned object
7782 as a field, we might be extracting a BLKmode value from
7783 an integer-mode (e.g., SImode) object. Handle this case
7784 by doing the extract into an object as wide as the field
7785 (which we know to be the width of a basic mode), then
7786 storing into memory, and changing the mode to BLKmode. */
7787 if (mode1 == VOIDmode
7788 || REG_P (op0) || GET_CODE (op0) == SUBREG
7789 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7790 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7791 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7792 && modifier != EXPAND_CONST_ADDRESS
7793 && modifier != EXPAND_INITIALIZER)
7794 /* If the field isn't aligned enough to fetch as a memref,
7795 fetch it as a bit field. */
7796 || (mode1 != BLKmode
7797 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7798 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7800 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7801 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7802 && ((modifier == EXPAND_CONST_ADDRESS
7803 || modifier == EXPAND_INITIALIZER)
7805 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7806 || (bitpos % BITS_PER_UNIT != 0)))
7807 /* If the type and the field are a constant size and the
7808 size of the type isn't the same size as the bitfield,
7809 we must use bitfield operations. */
7811 && TYPE_SIZE (TREE_TYPE (exp))
7812 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7813 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7816 enum machine_mode ext_mode = mode;
7818 if (ext_mode == BLKmode
7819 && ! (target != 0 && MEM_P (op0)
7821 && bitpos % BITS_PER_UNIT == 0))
7822 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7824 if (ext_mode == BLKmode)
7827 target = assign_temp (type, 0, 1, 1);
7832 /* In this case, BITPOS must start at a byte boundary and
7833 TARGET, if specified, must be a MEM. */
7834 gcc_assert (MEM_P (op0)
7835 && (!target || MEM_P (target))
7836 && !(bitpos % BITS_PER_UNIT));
7838 emit_block_move (target,
7839 adjust_address (op0, VOIDmode,
7840 bitpos / BITS_PER_UNIT),
7841 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7843 (modifier == EXPAND_STACK_PARM
7844 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7849 op0 = validize_mem (op0);
7851 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7852 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7854 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7855 (modifier == EXPAND_STACK_PARM
7856 ? NULL_RTX : target),
7857 ext_mode, ext_mode);
7859 /* If the result is a record type and BITSIZE is narrower than
7860 the mode of OP0, an integral mode, and this is a big endian
7861 machine, we must put the field into the high-order bits. */
7862 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7863 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7864 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7865 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7866 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7870 /* If the result type is BLKmode, store the data into a temporary
7871 of the appropriate type, but with the mode corresponding to the
7872 mode for the data we have (op0's mode). It's tempting to make
7873 this a constant type, since we know it's only being stored once,
7874 but that can cause problems if we are taking the address of this
7875 COMPONENT_REF because the MEM of any reference via that address
7876 will have flags corresponding to the type, which will not
7877 necessarily be constant. */
7878 if (mode == BLKmode)
7880 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7883 /* If the reference doesn't use the alias set of its type,
7884 we cannot create the temporary using that type. */
7885 if (component_uses_parent_alias_set (exp))
7887 new = assign_stack_local (ext_mode, size, 0);
7888 set_mem_alias_set (new, get_alias_set (exp));
7891 new = assign_stack_temp_for_type (ext_mode, size, 0, type);
7893 emit_move_insn (new, op0);
7894 op0 = copy_rtx (new);
7895 PUT_MODE (op0, BLKmode);
7896 set_mem_attributes (op0, exp, 1);
7902 /* If the result is BLKmode, use that to access the object
7904 if (mode == BLKmode)
7907 /* Get a reference to just this component. */
7908 if (modifier == EXPAND_CONST_ADDRESS
7909 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7910 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7912 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7914 if (op0 == orig_op0)
7915 op0 = copy_rtx (op0);
7917 set_mem_attributes (op0, exp, 0);
7918 if (REG_P (XEXP (op0, 0)))
7919 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7921 MEM_VOLATILE_P (op0) |= volatilep;
7922 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7923 || modifier == EXPAND_CONST_ADDRESS
7924 || modifier == EXPAND_INITIALIZER)
7926 else if (target == 0)
7927 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7929 convert_move (target, op0, unsignedp);
7934 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7937 /* Check for a built-in function. */
7938 if (TREE_CODE (CALL_EXPR_FN (exp)) == ADDR_EXPR
7939 && (TREE_CODE (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7941 && DECL_BUILT_IN (TREE_OPERAND (CALL_EXPR_FN (exp), 0)))
7943 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (CALL_EXPR_FN (exp), 0))
7944 == BUILT_IN_FRONTEND)
7945 return lang_hooks.expand_expr (exp, original_target,
7949 return expand_builtin (exp, target, subtarget, tmode, ignore);
7952 return expand_call (exp, target, ignore);
7954 case NON_LVALUE_EXPR:
7957 if (TREE_OPERAND (exp, 0) == error_mark_node)
7960 if (TREE_CODE (type) == UNION_TYPE)
7962 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7964 /* If both input and output are BLKmode, this conversion isn't doing
7965 anything except possibly changing memory attribute. */
7966 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7968 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7971 result = copy_rtx (result);
7972 set_mem_attributes (result, exp, 0);
7978 if (TYPE_MODE (type) != BLKmode)
7979 target = gen_reg_rtx (TYPE_MODE (type));
7981 target = assign_temp (type, 0, 1, 1);
7985 /* Store data into beginning of memory target. */
7986 store_expr (TREE_OPERAND (exp, 0),
7987 adjust_address (target, TYPE_MODE (valtype), 0),
7988 modifier == EXPAND_STACK_PARM,
7993 gcc_assert (REG_P (target));
7995 /* Store this field into a union of the proper type. */
7996 store_field (target,
7997 MIN ((int_size_in_bytes (TREE_TYPE
7998 (TREE_OPERAND (exp, 0)))
8000 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8001 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8005 /* Return the entire union. */
8009 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8011 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8014 /* If the signedness of the conversion differs and OP0 is
8015 a promoted SUBREG, clear that indication since we now
8016 have to do the proper extension. */
8017 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8018 && GET_CODE (op0) == SUBREG)
8019 SUBREG_PROMOTED_VAR_P (op0) = 0;
8021 return REDUCE_BIT_FIELD (op0);
8024 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8025 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8026 if (GET_MODE (op0) == mode)
8029 /* If OP0 is a constant, just convert it into the proper mode. */
8030 else if (CONSTANT_P (op0))
8032 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8033 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8035 if (modifier == EXPAND_INITIALIZER)
8036 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8037 subreg_lowpart_offset (mode,
8040 op0= convert_modes (mode, inner_mode, op0,
8041 TYPE_UNSIGNED (inner_type));
8044 else if (modifier == EXPAND_INITIALIZER)
8045 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8047 else if (target == 0)
8048 op0 = convert_to_mode (mode, op0,
8049 TYPE_UNSIGNED (TREE_TYPE
8050 (TREE_OPERAND (exp, 0))));
8053 convert_move (target, op0,
8054 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8058 return REDUCE_BIT_FIELD (op0);
8060 case VIEW_CONVERT_EXPR:
8061 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8063 /* If the input and output modes are both the same, we are done. */
8064 if (TYPE_MODE (type) == GET_MODE (op0))
8066 /* If neither mode is BLKmode, and both modes are the same size
8067 then we can use gen_lowpart. */
8068 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
8069 && GET_MODE_SIZE (TYPE_MODE (type))
8070 == GET_MODE_SIZE (GET_MODE (op0)))
8072 if (GET_CODE (op0) == SUBREG)
8073 op0 = force_reg (GET_MODE (op0), op0);
8074 op0 = gen_lowpart (TYPE_MODE (type), op0);
8076 /* If both modes are integral, then we can convert from one to the
8078 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
8079 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
8080 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
8081 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8082 /* As a last resort, spill op0 to memory, and reload it in a
8084 else if (!MEM_P (op0))
8086 /* If the operand is not a MEM, force it into memory. Since we
8087 are going to be changing the mode of the MEM, don't call
8088 force_const_mem for constants because we don't allow pool
8089 constants to change mode. */
8090 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8092 gcc_assert (!TREE_ADDRESSABLE (exp));
8094 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8096 = assign_stack_temp_for_type
8097 (TYPE_MODE (inner_type),
8098 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8100 emit_move_insn (target, op0);
8104 /* At this point, OP0 is in the correct mode. If the output type is such
8105 that the operand is known to be aligned, indicate that it is.
8106 Otherwise, we need only be concerned about alignment for non-BLKmode
8110 op0 = copy_rtx (op0);
8112 if (TYPE_ALIGN_OK (type))
8113 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8114 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
8115 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
8117 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8118 HOST_WIDE_INT temp_size
8119 = MAX (int_size_in_bytes (inner_type),
8120 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
8121 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
8122 temp_size, 0, type);
8123 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
8125 gcc_assert (!TREE_ADDRESSABLE (exp));
8127 if (GET_MODE (op0) == BLKmode)
8128 emit_block_move (new_with_op0_mode, op0,
8129 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8130 (modifier == EXPAND_STACK_PARM
8131 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8133 emit_move_insn (new_with_op0_mode, op0);
8138 op0 = adjust_address (op0, TYPE_MODE (type), 0);
8143 case POINTER_PLUS_EXPR:
8144 /* Even though the sizetype mode and the pointer's mode can be different
8145 expand is able to handle this correctly and get the correct result out
8146 of the PLUS_EXPR code. */
8149 /* Check if this is a case for multiplication and addition. */
8150 if (TREE_CODE (type) == INTEGER_TYPE
8151 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8153 tree subsubexp0, subsubexp1;
8154 enum tree_code code0, code1;
8156 subexp0 = TREE_OPERAND (exp, 0);
8157 subsubexp0 = TREE_OPERAND (subexp0, 0);
8158 subsubexp1 = TREE_OPERAND (subexp0, 1);
8159 code0 = TREE_CODE (subsubexp0);
8160 code1 = TREE_CODE (subsubexp1);
8161 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8162 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8163 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8164 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8165 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8166 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8167 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8169 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8170 enum machine_mode innermode = TYPE_MODE (op0type);
8171 bool zextend_p = TYPE_UNSIGNED (op0type);
8172 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
8173 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8174 && (optab_handler (this_optab, mode)->insn_code
8175 != CODE_FOR_nothing))
8177 expand_operands (TREE_OPERAND (subsubexp0, 0),
8178 TREE_OPERAND (subsubexp1, 0),
8179 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8180 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8181 VOIDmode, EXPAND_NORMAL);
8182 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8185 return REDUCE_BIT_FIELD (temp);
8190 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8191 something else, make sure we add the register to the constant and
8192 then to the other thing. This case can occur during strength
8193 reduction and doing it this way will produce better code if the
8194 frame pointer or argument pointer is eliminated.
8196 fold-const.c will ensure that the constant is always in the inner
8197 PLUS_EXPR, so the only case we need to do anything about is if
8198 sp, ap, or fp is our second argument, in which case we must swap
8199 the innermost first argument and our second argument. */
8201 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8202 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8203 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8204 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8205 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8206 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8208 tree t = TREE_OPERAND (exp, 1);
8210 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8211 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8214 /* If the result is to be ptr_mode and we are adding an integer to
8215 something, we might be forming a constant. So try to use
8216 plus_constant. If it produces a sum and we can't accept it,
8217 use force_operand. This allows P = &ARR[const] to generate
8218 efficient code on machines where a SYMBOL_REF is not a valid address.
8221 If this is an EXPAND_SUM call, always return the sum. */
8222 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8223 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8225 if (modifier == EXPAND_STACK_PARM)
8227 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8228 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8229 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8233 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8235 /* Use immed_double_const to ensure that the constant is
8236 truncated according to the mode of OP1, then sign extended
8237 to a HOST_WIDE_INT. Using the constant directly can result
8238 in non-canonical RTL in a 64x32 cross compile. */
8240 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8242 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8243 op1 = plus_constant (op1, INTVAL (constant_part));
8244 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8245 op1 = force_operand (op1, target);
8246 return REDUCE_BIT_FIELD (op1);
8249 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8250 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8251 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8255 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8256 (modifier == EXPAND_INITIALIZER
8257 ? EXPAND_INITIALIZER : EXPAND_SUM));
8258 if (! CONSTANT_P (op0))
8260 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8261 VOIDmode, modifier);
8262 /* Return a PLUS if modifier says it's OK. */
8263 if (modifier == EXPAND_SUM
8264 || modifier == EXPAND_INITIALIZER)
8265 return simplify_gen_binary (PLUS, mode, op0, op1);
8268 /* Use immed_double_const to ensure that the constant is
8269 truncated according to the mode of OP1, then sign extended
8270 to a HOST_WIDE_INT. Using the constant directly can result
8271 in non-canonical RTL in a 64x32 cross compile. */
8273 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8275 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8276 op0 = plus_constant (op0, INTVAL (constant_part));
8277 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8278 op0 = force_operand (op0, target);
8279 return REDUCE_BIT_FIELD (op0);
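/* Added illustration (not from the original sources): the constant
   paths above implement the P = &ARR[const] case mentioned earlier.
   Assuming 4-byte ints, a hypothetical

       static int arr[8];
       int *p = &arr[3];

   expands via plus_constant to the single operand (symbol_ref:arr)+12
   rather than to a run-time PLUS; force_operand legitimizes the sum
   only when the modifier does not permit returning it as-is.  */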
8283 /* No sense saving up arithmetic to be done
8284 if it's all in the wrong mode to form part of an address.
8285 And force_operand won't know whether to sign-extend or zero-extend.  */
8287 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8288 || mode != ptr_mode)
8290 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8291 subtarget, &op0, &op1, 0);
8292 if (op0 == const0_rtx)
8294 if (op1 == const0_rtx)
8299 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8300 subtarget, &op0, &op1, modifier);
8301 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8304 /* Check if this is a case for multiplication and subtraction. */
8305 if (TREE_CODE (type) == INTEGER_TYPE
8306 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8308 tree subsubexp0, subsubexp1;
8309 enum tree_code code0, code1;
8311 subexp1 = TREE_OPERAND (exp, 1);
8312 subsubexp0 = TREE_OPERAND (subexp1, 0);
8313 subsubexp1 = TREE_OPERAND (subexp1, 1);
8314 code0 = TREE_CODE (subsubexp0);
8315 code1 = TREE_CODE (subsubexp1);
8316 if (code0 == NOP_EXPR && code1 == NOP_EXPR
8317 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8318 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8319 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8320 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8321 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8322 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8324 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8325 enum machine_mode innermode = TYPE_MODE (op0type);
8326 bool zextend_p = TYPE_UNSIGNED (op0type);
8327 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
8328 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8329 && (optab_handler (this_optab, mode)->insn_code
8330 != CODE_FOR_nothing))
8332 expand_operands (TREE_OPERAND (subsubexp0, 0),
8333 TREE_OPERAND (subsubexp1, 0),
8334 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8335 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8336 VOIDmode, EXPAND_NORMAL);
8337 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8340 return REDUCE_BIT_FIELD (temp);
8345 /* For initializers, we are allowed to return a MINUS of two
8346 symbolic constants. Here we handle all cases when both operands are constant.  */
8348 /* Handle difference of two symbolic constants,
8349 for the sake of an initializer. */
8350 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8351 && really_constant_p (TREE_OPERAND (exp, 0))
8352 && really_constant_p (TREE_OPERAND (exp, 1)))
8354 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8355 NULL_RTX, &op0, &op1, modifier);
8357 /* If the last operand is a CONST_INT, use plus_constant of
8358 the negated constant. Else make the MINUS. */
8359 if (GET_CODE (op1) == CONST_INT)
8360 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8362 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8365 /* No sense saving up arithmetic to be done
8366 if it's all in the wrong mode to form part of an address.
8367 And force_operand won't know whether to sign-extend or zero-extend.  */
8369 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8370 || mode != ptr_mode)
8373 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8374 subtarget, &op0, &op1, modifier);
8376 /* Convert A - const to A + (-const). */
8377 if (GET_CODE (op1) == CONST_INT)
8379 op1 = negate_rtx (mode, op1);
8380 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8386 /* If first operand is constant, swap them.
8387 Thus the following special case checks need only
8388 check the second operand. */
8389 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8391 tree t1 = TREE_OPERAND (exp, 0);
8392 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8393 TREE_OPERAND (exp, 1) = t1;
8396 /* Attempt to return something suitable for generating an
8397 indexed address, for machines that support that. */
8399 if (modifier == EXPAND_SUM && mode == ptr_mode
8400 && host_integerp (TREE_OPERAND (exp, 1), 0))
8402 tree exp1 = TREE_OPERAND (exp, 1);
8404 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8408 op0 = force_operand (op0, NULL_RTX);
8410 op0 = copy_to_mode_reg (mode, op0);
8412 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8413 gen_int_mode (tree_low_cst (exp1, 0),
8414 TYPE_MODE (TREE_TYPE (exp1)))));
8417 if (modifier == EXPAND_STACK_PARM)
8420 /* Check for multiplying things that have been extended
8421 from a narrower type. If this machine supports multiplying
8422 in that narrower type with a result in the desired type,
8423 do it that way, and avoid the explicit type-conversion. */
8425 subexp0 = TREE_OPERAND (exp, 0);
8426 subexp1 = TREE_OPERAND (exp, 1);
8427 /* First, check if we have a multiplication of one signed and one
8428 unsigned operand. */
8429 if (TREE_CODE (subexp0) == NOP_EXPR
8430 && TREE_CODE (subexp1) == NOP_EXPR
8431 && TREE_CODE (type) == INTEGER_TYPE
8432 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8433 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8434 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8435 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8436 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8437 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8439 enum machine_mode innermode
8440 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8441 this_optab = usmul_widen_optab;
8442 if (mode == GET_MODE_WIDER_MODE (innermode))
8444 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8446 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8447 expand_operands (TREE_OPERAND (subexp0, 0),
8448 TREE_OPERAND (subexp1, 0),
8449 NULL_RTX, &op0, &op1, 0);
8451 expand_operands (TREE_OPERAND (subexp0, 0),
8452 TREE_OPERAND (subexp1, 0),
8453 NULL_RTX, &op1, &op0, 0);
8459 /* Check for a multiplication with matching signedness. */
8460 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8461 && TREE_CODE (type) == INTEGER_TYPE
8462 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8463 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8464 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8465 && int_fits_type_p (TREE_OPERAND (exp, 1),
8466 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8467 /* Don't use a widening multiply if a shift will do. */
8468 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8469 > HOST_BITS_PER_WIDE_INT)
8470 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8472 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8473 && (TYPE_PRECISION (TREE_TYPE
8474 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8475 == TYPE_PRECISION (TREE_TYPE
8477 (TREE_OPERAND (exp, 0), 0))))
8478 /* If both operands are extended, they must either both
8479 be zero-extended or both be sign-extended. */
8480 && (TYPE_UNSIGNED (TREE_TYPE
8481 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8482 == TYPE_UNSIGNED (TREE_TYPE
8484 (TREE_OPERAND (exp, 0), 0)))))))
8486 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8487 enum machine_mode innermode = TYPE_MODE (op0type);
8488 bool zextend_p = TYPE_UNSIGNED (op0type);
8489 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8490 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8492 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8494 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8496 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8497 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8498 TREE_OPERAND (exp, 1),
8499 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8501 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8502 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8503 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8506 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8507 && innermode == word_mode)
8510 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8511 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8512 op1 = convert_modes (innermode, mode,
8513 expand_normal (TREE_OPERAND (exp, 1)),
8516 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8517 temp = expand_binop (mode, other_optab, op0, op1, target,
8518 unsignedp, OPTAB_LIB_WIDEN);
8519 hipart = gen_highpart (innermode, temp);
8520 htem = expand_mult_highpart_adjust (innermode, hipart,
8524 emit_move_insn (hipart, htem);
8525 return REDUCE_BIT_FIELD (temp);
8529 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8530 subtarget, &op0, &op1, 0);
8531 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
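/* Added illustration (not from the original sources): a sketch of the
   widening-multiply detection above, assuming 32-bit int and 64-bit
   long long so that DImode is GET_MODE_2XWIDER_MODE (SImode):

       long long
       widen_mult_example (int a, int b)
       {
         return (long long) a * (long long) b;
       }

   Both operands are sign-extended NOP_EXPRs, so smul_widen_optab
   (e.g. a mulsidi3 pattern) computes the DImode product directly;
   failing that, the word_mode highpart-adjust fallback above is
   tried before a full DImode multiplication.  */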
8533 case TRUNC_DIV_EXPR:
8534 case FLOOR_DIV_EXPR:
8536 case ROUND_DIV_EXPR:
8537 case EXACT_DIV_EXPR:
8538 if (modifier == EXPAND_STACK_PARM)
8540 /* Possible optimization: compute the dividend with EXPAND_SUM
8541 then, if the divisor is constant, we can optimize the case
8542 where some terms of the dividend have coefficients divisible by it. */
8543 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8544 subtarget, &op0, &op1, 0);
8545 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8550 case TRUNC_MOD_EXPR:
8551 case FLOOR_MOD_EXPR:
8553 case ROUND_MOD_EXPR:
8554 if (modifier == EXPAND_STACK_PARM)
8556 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8557 subtarget, &op0, &op1, 0);
8558 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8560 case FIX_TRUNC_EXPR:
8561 op0 = expand_normal (TREE_OPERAND (exp, 0));
8562 if (target == 0 || modifier == EXPAND_STACK_PARM)
8563 target = gen_reg_rtx (mode);
8564 expand_fix (target, op0, unsignedp);
8568 op0 = expand_normal (TREE_OPERAND (exp, 0));
8569 if (target == 0 || modifier == EXPAND_STACK_PARM)
8570 target = gen_reg_rtx (mode);
8571 /* expand_float can't figure out what to do if FROM has VOIDmode.
8572 So give it the correct mode. With -O, cse will optimize this. */
8573 if (GET_MODE (op0) == VOIDmode)
8574 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8576 expand_float (target, op0,
8577 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8581 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8582 VOIDmode, EXPAND_NORMAL);
8583 if (modifier == EXPAND_STACK_PARM)
8585 temp = expand_unop (mode,
8586 optab_for_tree_code (NEGATE_EXPR, type),
8589 return REDUCE_BIT_FIELD (temp);
8592 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8593 VOIDmode, EXPAND_NORMAL);
8594 if (modifier == EXPAND_STACK_PARM)
8597 /* ABS_EXPR is not valid for complex arguments. */
8598 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8599 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8601 /* Unsigned abs is simply the operand. Testing here means we don't
8602 risk generating incorrect code below. */
8603 if (TYPE_UNSIGNED (type))
8606 return expand_abs (mode, op0, target, unsignedp,
8607 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8611 target = original_target;
8613 || modifier == EXPAND_STACK_PARM
8614 || (MEM_P (target) && MEM_VOLATILE_P (target))
8615 || GET_MODE (target) != mode
8617 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8618 target = gen_reg_rtx (mode);
8619 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8620 target, &op0, &op1, 0);
8622 /* First try to do it with a special MIN or MAX instruction.
8623 If that does not win, use a conditional jump to select the proper value.  */
8625 this_optab = optab_for_tree_code (code, type);
8626 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8631 /* At this point, a MEM target is no longer useful; we will get better code without it.  */
8634 if (! REG_P (target))
8635 target = gen_reg_rtx (mode);
8637 /* If op1 was placed in target, swap op0 and op1. */
8638 if (target != op0 && target == op1)
8645 /* We generate better code and avoid problems with op1 mentioning
8646 target by forcing op1 into a pseudo if it isn't a constant. */
8647 if (! CONSTANT_P (op1))
8648 op1 = force_reg (mode, op1);
8651 enum rtx_code comparison_code;
8654 if (code == MAX_EXPR)
8655 comparison_code = unsignedp ? GEU : GE;
8657 comparison_code = unsignedp ? LEU : LE;
8659 /* Canonicalize to comparisons against 0. */
8660 if (op1 == const1_rtx)
8662 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8663 or (a != 0 ? a : 1) for unsigned.
8664 For MIN we are safe converting (a <= 1 ? a : 1)
8665 into (a <= 0 ? a : 1).  */
8666 cmpop1 = const0_rtx;
8667 if (code == MAX_EXPR)
8668 comparison_code = unsignedp ? NE : GT;
8670 if (op1 == constm1_rtx && !unsignedp)
8672 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8673 and (a <= -1 ? a : -1) into (a < 0 ? a : -1).  */
8674 cmpop1 = const0_rtx;
8675 if (code == MIN_EXPR)
8676 comparison_code = LT;
8678 #ifdef HAVE_conditional_move
8679 /* Use a conditional move if possible. */
8680 if (can_conditionally_move_p (mode))
8684 /* ??? Same problem as in expmed.c: emit_conditional_move
8685 forces a stack adjustment via compare_from_rtx, and we
8686 lose the stack adjustment if the sequence we are about
8687 to create is discarded. */
8688 do_pending_stack_adjust ();
8692 /* Try to emit the conditional move. */
8693 insn = emit_conditional_move (target, comparison_code,
8698 /* If we could do the conditional move, emit the sequence, and return.  */
8702 rtx seq = get_insns ();
8708 /* Otherwise discard the sequence and fall back to code with branches.  */
8714 emit_move_insn (target, op0);
8716 temp = gen_label_rtx ();
8717 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8718 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8720 emit_move_insn (target, op1);
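/* Added commentary: when neither a min/max insn nor a conditional
   move is available, the fallback above emits, in effect,

       target = op0;
       if (target COMPARISON_CODE cmpop1)   e.g. GE for signed MAX_EXPR
	 goto done;
       target = op1;
     done:

   the generic set/compare/branch/set sequence.  */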
8725 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8726 VOIDmode, EXPAND_NORMAL);
8727 if (modifier == EXPAND_STACK_PARM)
8729 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8733 /* ??? Can optimize bitwise operations with one arg constant.
8734 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8735 and (a bitwise1 b) bitwise2 b (etc)
8736 but that is probably not worthwhile. */
8738 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8739 boolean values when we want in all cases to compute both of them. In
8740 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8741 as actual zero-or-1 values and then bitwise anding. In cases where
8742 there cannot be any side effects, better code would be made by
8743 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8744 how to recognize those cases. */
8746 case TRUTH_AND_EXPR:
8747 code = BIT_AND_EXPR;
8752 code = BIT_IOR_EXPR;
8756 case TRUTH_XOR_EXPR:
8757 code = BIT_XOR_EXPR;
8765 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8767 if (modifier == EXPAND_STACK_PARM)
8769 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8770 VOIDmode, EXPAND_NORMAL);
8771 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8774 /* Could determine the answer when only additive constants differ. Also,
8775 the addition of one can be handled by changing the condition. */
8782 case UNORDERED_EXPR:
8790 temp = do_store_flag (exp,
8791 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8792 tmode != VOIDmode ? tmode : mode, 0);
8796 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8797 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8799 && REG_P (original_target)
8800 && (GET_MODE (original_target)
8801 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8803 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8804 VOIDmode, EXPAND_NORMAL);
8806 /* If temp is constant, we can just compute the result. */
8807 if (GET_CODE (temp) == CONST_INT)
8809 if (INTVAL (temp) != 0)
8810 emit_move_insn (target, const1_rtx);
8812 emit_move_insn (target, const0_rtx);
8817 if (temp != original_target)
8819 enum machine_mode mode1 = GET_MODE (temp);
8820 if (mode1 == VOIDmode)
8821 mode1 = tmode != VOIDmode ? tmode : mode;
8823 temp = copy_to_mode_reg (mode1, temp);
8826 op1 = gen_label_rtx ();
8827 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8828 GET_MODE (temp), unsignedp, op1);
8829 emit_move_insn (temp, const1_rtx);
8834 /* If no set-flag instruction, must generate a conditional store
8835 into a temporary variable. Drop through and handle this like && and ||.  */
8840 || modifier == EXPAND_STACK_PARM
8841 || ! safe_from_p (target, exp, 1)
8842 /* Make sure we don't have a hard reg (such as function's return
8843 value) live across basic blocks, if not optimizing. */
8844 || (!optimize && REG_P (target)
8845 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8846 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8849 emit_move_insn (target, const0_rtx);
8851 op1 = gen_label_rtx ();
8852 jumpifnot (exp, op1);
8855 emit_move_insn (target, const1_rtx);
8858 return ignore ? const0_rtx : target;
8860 case TRUTH_NOT_EXPR:
8861 if (modifier == EXPAND_STACK_PARM)
8863 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
8864 VOIDmode, EXPAND_NORMAL);
8865 /* The parser is careful to generate TRUTH_NOT_EXPR
8866 only with operands that are always zero or one. */
8867 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8868 target, 1, OPTAB_LIB_WIDEN);
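/* Added commentary: because TRUTH_NOT_EXPR operands are guaranteed to
   be 0 or 1 (see the comment above), logical negation is simply

       temp = op0 ^ 1;

   which is why xor_optab with const1_rtx suffices here.  */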
8872 case STATEMENT_LIST:
8874 tree_stmt_iterator iter;
8876 gcc_assert (ignore);
8878 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8879 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8884 /* A COND_EXPR with its type being VOID_TYPE represents a
8885 conditional jump and is handled in
8886 expand_gimple_cond_expr. */
8887 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8889 /* Note that COND_EXPRs whose type is a structure or union
8890 are required to be constructed to contain assignments of
8891 a temporary variable, so that we can evaluate them here
8892 for side effect only. If type is void, we must do likewise. */
8894 gcc_assert (!TREE_ADDRESSABLE (type)
8896 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8897 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8899 /* If we are not to produce a result, we have no target. Otherwise,
8900 if a target was specified use it; it will not be used as an
8901 intermediate target unless it is safe. If no target, use a temporary.  */
8904 if (modifier != EXPAND_STACK_PARM
8906 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8907 && GET_MODE (original_target) == mode
8908 #ifdef HAVE_conditional_move
8909 && (! can_conditionally_move_p (mode)
8910 || REG_P (original_target))
8912 && !MEM_P (original_target))
8913 temp = original_target;
8915 temp = assign_temp (type, 0, 0, 1);
8917 do_pending_stack_adjust ();
8919 op0 = gen_label_rtx ();
8920 op1 = gen_label_rtx ();
8921 jumpifnot (TREE_OPERAND (exp, 0), op0);
8922 store_expr (TREE_OPERAND (exp, 1), temp,
8923 modifier == EXPAND_STACK_PARM,
8926 emit_jump_insn (gen_jump (op1));
8929 store_expr (TREE_OPERAND (exp, 2), temp,
8930 modifier == EXPAND_STACK_PARM,
8938 target = expand_vec_cond_expr (exp, target);
8943 tree lhs = TREE_OPERAND (exp, 0);
8944 tree rhs = TREE_OPERAND (exp, 1);
8945 gcc_assert (ignore);
8946 expand_assignment (lhs, rhs, false);
8950 case GIMPLE_MODIFY_STMT:
8952 tree lhs = GIMPLE_STMT_OPERAND (exp, 0);
8953 tree rhs = GIMPLE_STMT_OPERAND (exp, 1);
8955 gcc_assert (ignore);
8957 /* Check for |= or &= of a bitfield of size one into another bitfield
8958 of size 1. In this case, (unless we need the result of the
8959 assignment) we can do this more efficiently with a
8960 test followed by an assignment, if necessary.
8962 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8963 things change so we do, this code should be enhanced to support it.  */
8965 if (TREE_CODE (lhs) == COMPONENT_REF
8966 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8967 || TREE_CODE (rhs) == BIT_AND_EXPR)
8968 && TREE_OPERAND (rhs, 0) == lhs
8969 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8970 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8971 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8973 rtx label = gen_label_rtx ();
8974 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8975 do_jump (TREE_OPERAND (rhs, 1),
8978 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
8979 MOVE_NONTEMPORAL (exp));
8980 do_pending_stack_adjust ();
8985 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
8990 if (!TREE_OPERAND (exp, 0))
8991 expand_null_return ();
8993 expand_return (TREE_OPERAND (exp, 0));
8997 return expand_expr_addr_expr (exp, target, tmode, modifier);
9000 /* Get the rtx code of the operands. */
9001 op0 = expand_normal (TREE_OPERAND (exp, 0));
9002 op1 = expand_normal (TREE_OPERAND (exp, 1));
9005 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9007 /* Move the real (op0) and imaginary (op1) parts to their location. */
9008 write_complex_part (target, op0, false);
9009 write_complex_part (target, op1, true);
9014 op0 = expand_normal (TREE_OPERAND (exp, 0));
9015 return read_complex_part (op0, false);
9018 op0 = expand_normal (TREE_OPERAND (exp, 0));
9019 return read_complex_part (op0, true);
9022 expand_resx_expr (exp);
9025 case TRY_CATCH_EXPR:
9027 case EH_FILTER_EXPR:
9028 case TRY_FINALLY_EXPR:
9029 /* Lowered by tree-eh.c. */
9032 case WITH_CLEANUP_EXPR:
9033 case CLEANUP_POINT_EXPR:
9035 case CASE_LABEL_EXPR:
9041 case PREINCREMENT_EXPR:
9042 case PREDECREMENT_EXPR:
9043 case POSTINCREMENT_EXPR:
9044 case POSTDECREMENT_EXPR:
9047 case TRUTH_ANDIF_EXPR:
9048 case TRUTH_ORIF_EXPR:
9049 /* Lowered by gimplify.c. */
9052 case CHANGE_DYNAMIC_TYPE_EXPR:
9053 /* This is ignored at the RTL level. The tree level sets
9054 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
9055 overkill for the RTL layer but is all that we can do.  */
9060 return get_exception_pointer (cfun);
9063 return get_exception_filter (cfun);
9066 /* Function descriptors are not valid except as
9067 initialization constants, and should not be expanded. */
9075 expand_label (TREE_OPERAND (exp, 0));
9079 expand_asm_expr (exp);
9082 case WITH_SIZE_EXPR:
9083 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9084 have pulled out the size to use in whatever context it needed. */
9085 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9088 case REALIGN_LOAD_EXPR:
9090 tree oprnd0 = TREE_OPERAND (exp, 0);
9091 tree oprnd1 = TREE_OPERAND (exp, 1);
9092 tree oprnd2 = TREE_OPERAND (exp, 2);
9095 this_optab = optab_for_tree_code (code, type);
9096 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9097 op2 = expand_normal (oprnd2);
9098 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9106 tree oprnd0 = TREE_OPERAND (exp, 0);
9107 tree oprnd1 = TREE_OPERAND (exp, 1);
9108 tree oprnd2 = TREE_OPERAND (exp, 2);
9111 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9112 op2 = expand_normal (oprnd2);
9113 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9118 case WIDEN_SUM_EXPR:
9120 tree oprnd0 = TREE_OPERAND (exp, 0);
9121 tree oprnd1 = TREE_OPERAND (exp, 1);
9123 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9124 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9129 case REDUC_MAX_EXPR:
9130 case REDUC_MIN_EXPR:
9131 case REDUC_PLUS_EXPR:
9133 op0 = expand_normal (TREE_OPERAND (exp, 0));
9134 this_optab = optab_for_tree_code (code, type);
9135 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9140 case VEC_EXTRACT_EVEN_EXPR:
9141 case VEC_EXTRACT_ODD_EXPR:
9143 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9144 NULL_RTX, &op0, &op1, 0);
9145 this_optab = optab_for_tree_code (code, type);
9146 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9152 case VEC_INTERLEAVE_HIGH_EXPR:
9153 case VEC_INTERLEAVE_LOW_EXPR:
9155 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9156 NULL_RTX, &op0, &op1, 0);
9157 this_optab = optab_for_tree_code (code, type);
9158 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9164 case VEC_LSHIFT_EXPR:
9165 case VEC_RSHIFT_EXPR:
9167 target = expand_vec_shift_expr (exp, target);
9171 case VEC_UNPACK_HI_EXPR:
9172 case VEC_UNPACK_LO_EXPR:
9174 op0 = expand_normal (TREE_OPERAND (exp, 0));
9175 this_optab = optab_for_tree_code (code, type);
9176 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9182 case VEC_UNPACK_FLOAT_HI_EXPR:
9183 case VEC_UNPACK_FLOAT_LO_EXPR:
9185 op0 = expand_normal (TREE_OPERAND (exp, 0));
9186 /* The signedness is determined from the input operand. */
9187 this_optab = optab_for_tree_code (code,
9188 TREE_TYPE (TREE_OPERAND (exp, 0)));
9189 temp = expand_widen_pattern_expr
9190 (exp, op0, NULL_RTX, NULL_RTX,
9191 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9197 case VEC_WIDEN_MULT_HI_EXPR:
9198 case VEC_WIDEN_MULT_LO_EXPR:
9200 tree oprnd0 = TREE_OPERAND (exp, 0);
9201 tree oprnd1 = TREE_OPERAND (exp, 1);
9203 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9204 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9206 gcc_assert (target);
9210 case VEC_PACK_TRUNC_EXPR:
9211 case VEC_PACK_SAT_EXPR:
9212 case VEC_PACK_FIX_TRUNC_EXPR:
9214 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9219 return lang_hooks.expand_expr (exp, original_target, tmode,
9223 /* Here to do an ordinary binary operator. */
9225 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9226 subtarget, &op0, &op1, 0);
9228 this_optab = optab_for_tree_code (code, type);
9230 if (modifier == EXPAND_STACK_PARM)
9232 temp = expand_binop (mode, this_optab, op0, op1, target,
9233 unsignedp, OPTAB_LIB_WIDEN);
9235 return REDUCE_BIT_FIELD (temp);
9237 #undef REDUCE_BIT_FIELD
9239 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9240 signedness of TYPE), possibly returning the result in TARGET. */
9242 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9244 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9245 if (target && GET_MODE (target) != GET_MODE (exp))
9247 /* For constant values, reduce using build_int_cst_type. */
9248 if (GET_CODE (exp) == CONST_INT)
9250 HOST_WIDE_INT value = INTVAL (exp);
9251 tree t = build_int_cst_type (type, value);
9252 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9254 else if (TYPE_UNSIGNED (type))
9257 if (prec < HOST_BITS_PER_WIDE_INT)
9258 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9261 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9262 ((unsigned HOST_WIDE_INT) 1
9263 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9265 return expand_and (GET_MODE (exp), exp, mask, target);
9269 tree count = build_int_cst (NULL_TREE,
9270 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9271 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9272 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
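/* Added illustration (not from the original sources): what the two
   branches above compute, written as C for a hypothetical 5-bit
   bit-field value carried in a 32-bit mode.  */
#if 0
static unsigned int
reduce_unsigned_example (unsigned int x)
{
  /* Unsigned types: mask down to the low PREC bits.  */
  return x & ((1u << 5) - 1);
}

static int
reduce_signed_example (int x)
{
  /* Signed types: shift the field up to the sign bit, then arithmetic
     shift back down so the field's top bit is replicated (assumes a
     32-bit int and an arithmetic right shift, as GCC itself relies on).  */
  return (int) ((unsigned int) x << (32 - 5)) >> (32 - 5);
}
#endif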
9276 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9277 when applied to the address of EXP produces an address known to be
9278 aligned more than BIGGEST_ALIGNMENT. */
9281 is_aligning_offset (const_tree offset, const_tree exp)
9283 /* Strip off any conversions. */
9284 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9285 || TREE_CODE (offset) == NOP_EXPR
9286 || TREE_CODE (offset) == CONVERT_EXPR)
9287 offset = TREE_OPERAND (offset, 0);
9289 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9290 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
9291 if (TREE_CODE (offset) != BIT_AND_EXPR
9292 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9293 || compare_tree_int (TREE_OPERAND (offset, 1),
9294 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9295 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9298 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9299 It must be NEGATE_EXPR. Then strip any more conversions. */
9300 offset = TREE_OPERAND (offset, 0);
9301 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9302 || TREE_CODE (offset) == NOP_EXPR
9303 || TREE_CODE (offset) == CONVERT_EXPR)
9304 offset = TREE_OPERAND (offset, 0);
9306 if (TREE_CODE (offset) != NEGATE_EXPR)
9309 offset = TREE_OPERAND (offset, 0);
9310 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9311 || TREE_CODE (offset) == NOP_EXPR
9312 || TREE_CODE (offset) == CONVERT_EXPR)
9313 offset = TREE_OPERAND (offset, 0);
9315 /* This must now be the address of EXP. */
9316 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
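/* Added commentary: the shape accepted above is the offset computed
   when rounding an address up to an ALIGN boundary, i.e.

       offset = (-(intptr_t) &exp) & (ALIGN - 1)

   for some power of 2 ALIGN larger than BIGGEST_ALIGNMENT in bytes;
   &exp + offset is then known to be ALIGN-aligned.  */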
9319 /* Return the tree node if an ARG corresponds to a string constant or zero
9320 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9321 in bytes within the string that ARG is accessing. The type of the
9322 offset will be `sizetype'. */
9325 string_constant (tree arg, tree *ptr_offset)
9327 tree array, offset, lower_bound;
9330 if (TREE_CODE (arg) == ADDR_EXPR)
9332 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9334 *ptr_offset = size_zero_node;
9335 return TREE_OPERAND (arg, 0);
9337 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9339 array = TREE_OPERAND (arg, 0);
9340 offset = size_zero_node;
9342 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9344 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9345 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9346 if (TREE_CODE (array) != STRING_CST
9347 && TREE_CODE (array) != VAR_DECL)
9350 /* Check if the array has a nonzero lower bound. */
9351 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9352 if (!integer_zerop (lower_bound))
9354 /* If the offset and lower bound aren't both constants, return 0. */
9355 if (TREE_CODE (lower_bound) != INTEGER_CST)
9357 if (TREE_CODE (offset) != INTEGER_CST)
9359 /* Adjust offset by the lower bound. */
9360 offset = size_diffop (fold_convert (sizetype, offset),
9361 fold_convert (sizetype, lower_bound));
9367 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9369 tree arg0 = TREE_OPERAND (arg, 0);
9370 tree arg1 = TREE_OPERAND (arg, 1);
9375 if (TREE_CODE (arg0) == ADDR_EXPR
9376 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9377 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9379 array = TREE_OPERAND (arg0, 0);
9382 else if (TREE_CODE (arg1) == ADDR_EXPR
9383 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9384 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9386 array = TREE_OPERAND (arg1, 0);
9395 if (TREE_CODE (array) == STRING_CST)
9397 *ptr_offset = fold_convert (sizetype, offset);
9400 else if (TREE_CODE (array) == VAR_DECL)
9404 /* Variables initialized to string literals can be handled too. */
9405 if (DECL_INITIAL (array) == NULL_TREE
9406 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9410 /* Only handle arrays that are read-only, non-volatile and bind locally. */
9410 if (! TREE_READONLY (array)
9411 || TREE_SIDE_EFFECTS (array)
9412 || ! targetm.binds_local_p (array))
9415 /* Avoid const char foo[4] = "abcde"; */
9416 if (DECL_SIZE_UNIT (array) == NULL_TREE
9417 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9418 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9419 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9422 /* If the variable is bigger than the string literal, OFFSET must be constant
9423 and inside the bounds of the string literal. */
9424 offset = fold_convert (sizetype, offset);
9425 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9426 && (! host_integerp (offset, 1)
9427 || compare_tree_int (offset, length) >= 0))
9430 *ptr_offset = offset;
9431 return DECL_INITIAL (array);
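/* Added illustration (not from the original sources): arguments that
   string_constant resolves; the names are hypothetical.  */
#if 0
static const char greeting[6] = "hello";

static void
string_constant_example (void)
{
  const char *p = "hello" + 2;  /* STRING_CST, *ptr_offset == 2.  */
  const char *q = &greeting[1]; /* VAR_DECL whose DECL_INITIAL is a
				   STRING_CST; *ptr_offset == 1.  */
  (void) p;
  (void) q;
}
#endif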
9437 /* Generate code to calculate EXP using a store-flag instruction
9438 and return an rtx for the result. EXP is either a comparison
9439 or a TRUTH_NOT_EXPR whose operand is a comparison.
9441 If TARGET is nonzero, store the result there if convenient.
9443 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9446 Return zero if there is no suitable set-flag instruction
9447 available on this machine.
9449 Once expand_expr has been called on the arguments of the comparison,
9450 we are committed to doing the store flag, since it is not safe to
9451 re-evaluate the expression. We emit the store-flag insn by calling
9452 emit_store_flag, but only expand the arguments if we have a reason
9453 to believe that emit_store_flag will be successful. If we think that
9454 it will, but it isn't, we have to simulate the store-flag with a
9455 set/jump/set sequence. */
9458 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9461 tree arg0, arg1, type;
9463 enum machine_mode operand_mode;
9467 enum insn_code icode;
9468 rtx subtarget = target;
9471 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9472 result at the end. We can't simply invert the test since it would
9473 have already been inverted if it were valid. This case occurs for
9474 some floating-point comparisons. */
9476 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9477 invert = 1, exp = TREE_OPERAND (exp, 0);
9479 arg0 = TREE_OPERAND (exp, 0);
9480 arg1 = TREE_OPERAND (exp, 1);
9482 /* Don't crash if the comparison was erroneous. */
9483 if (arg0 == error_mark_node || arg1 == error_mark_node)
9486 type = TREE_TYPE (arg0);
9487 operand_mode = TYPE_MODE (type);
9488 unsignedp = TYPE_UNSIGNED (type);
9490 /* We won't bother with BLKmode store-flag operations because it would mean
9491 passing a lot of information to emit_store_flag. */
9492 if (operand_mode == BLKmode)
9495 /* We won't bother with store-flag operations involving function pointers
9496 when function pointers must be canonicalized before comparisons. */
9497 #ifdef HAVE_canonicalize_funcptr_for_compare
9498 if (HAVE_canonicalize_funcptr_for_compare
9499 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9500 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9502 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9503 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9504 == FUNCTION_TYPE))))
9511 /* Get the rtx comparison code to use. We know that EXP is a comparison
9512 operation of some type. Some comparisons against 1 and -1 can be
9513 converted to comparisons with zero. Do so here so that the tests
9514 below will be aware that we have a comparison with zero. These
9515 tests will not catch constants in the first operand, but constants
9516 are rarely passed as the first operand. */
9518 switch (TREE_CODE (exp))
9527 if (integer_onep (arg1))
9528 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9530 code = unsignedp ? LTU : LT;
9533 if (! unsignedp && integer_all_onesp (arg1))
9534 arg1 = integer_zero_node, code = LT;
9536 code = unsignedp ? LEU : LE;
9539 if (! unsignedp && integer_all_onesp (arg1))
9540 arg1 = integer_zero_node, code = GE;
9542 code = unsignedp ? GTU : GT;
9545 if (integer_onep (arg1))
9546 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9548 code = unsignedp ? GEU : GE;
9551 case UNORDERED_EXPR:
9580 /* Put a constant second. */
9581 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9583 tem = arg0; arg0 = arg1; arg1 = tem;
9584 code = swap_condition (code);
9587 /* If this is an equality or inequality test of a single bit, we can
9588 do this by shifting the bit being tested to the low-order bit and
9589 masking the result with the constant 1. If the condition was EQ,
9590 we xor it with 1. This does not require an scc insn and is faster
9591 than an scc insn even if we have it.
9593 The code to make this transformation was moved into fold_single_bit_test,
9594 so we just call into the folder and expand its result. */
9596 if ((code == NE || code == EQ)
9597 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9598 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9600 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9601 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9603 target, VOIDmode, EXPAND_NORMAL);
9606 /* Now see if we are likely to be able to do this. Return if not. */
9607 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9610 icode = setcc_gen_code[(int) code];
9612 if (icode == CODE_FOR_nothing)
9614 enum machine_mode wmode;
9616 for (wmode = operand_mode;
9617 icode == CODE_FOR_nothing && wmode != VOIDmode;
9618 wmode = GET_MODE_WIDER_MODE (wmode))
9619 icode = optab_handler (cstore_optab, wmode)->insn_code;
9622 if (icode == CODE_FOR_nothing
9623 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9625 /* We can only do this if it is one of the special cases that
9626 can be handled without an scc insn. */
9627 if ((code == LT && integer_zerop (arg1))
9628 || (! only_cheap && code == GE && integer_zerop (arg1)))
9630 else if (! only_cheap && (code == NE || code == EQ)
9631 && TREE_CODE (type) != REAL_TYPE
9632 && ((optab_handler (abs_optab, operand_mode)->insn_code
9633 != CODE_FOR_nothing)
9634 || (optab_handler (ffs_optab, operand_mode)->insn_code
9635 != CODE_FOR_nothing)))
9641 if (! get_subtarget (target)
9642 || GET_MODE (subtarget) != operand_mode)
9645 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9648 target = gen_reg_rtx (mode);
9650 result = emit_store_flag (target, code, op0, op1,
9651 operand_mode, unsignedp, 1);
9656 result = expand_binop (mode, xor_optab, result, const1_rtx,
9657 result, 0, OPTAB_LIB_WIDEN);
9661 /* If this failed, we have to do this with set/compare/jump/set code. */
9663 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9664 target = gen_reg_rtx (GET_MODE (target));
9666 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9667 label = gen_label_rtx ();
9668 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9671 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
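/* Added commentary: the set/compare/jump/set fallback above emits,
   in effect (with the two constants exchanged when INVERT),

       target = 1;
       if (op0 CODE op1)
	 goto label;
       target = 0;
     label:

   which is correct on any machine at the cost of a branch.  */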
9678 /* Stubs in case we haven't got a casesi insn. */
9680 # define HAVE_casesi 0
9681 # define gen_casesi(a, b, c, d, e) (0)
9682 # define CODE_FOR_casesi CODE_FOR_nothing
9685 /* If the machine does not have a case insn that compares the bounds,
9686 this means extra overhead for dispatch tables, which raises the
9687 threshold for using them. */
9688 #ifndef CASE_VALUES_THRESHOLD
9689 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9690 #endif /* CASE_VALUES_THRESHOLD */
9693 case_values_threshold (void)
9695 return CASE_VALUES_THRESHOLD;
9698 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9699 0 otherwise (i.e. if there is no casesi instruction). */
9701 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9702 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9704 enum machine_mode index_mode = SImode;
9705 int index_bits = GET_MODE_BITSIZE (index_mode);
9706 rtx op1, op2, index;
9707 enum machine_mode op_mode;
9712 /* Convert the index to SImode. */
9713 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9715 enum machine_mode omode = TYPE_MODE (index_type);
9716 rtx rangertx = expand_normal (range);
9718 /* We must handle the endpoints in the original mode. */
9719 index_expr = build2 (MINUS_EXPR, index_type,
9720 index_expr, minval);
9721 minval = integer_zero_node;
9722 index = expand_normal (index_expr);
9723 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9724 omode, 1, default_label);
9725 /* Now we can safely truncate. */
9726 index = convert_to_mode (index_mode, index, 0);
9730 if (TYPE_MODE (index_type) != index_mode)
9732 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9733 index_expr = fold_convert (index_type, index_expr);
9736 index = expand_normal (index_expr);
9739 do_pending_stack_adjust ();
9741 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9742 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9744 index = copy_to_mode_reg (op_mode, index);
9746 op1 = expand_normal (minval);
9748 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9749 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9750 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9751 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9753 op1 = copy_to_mode_reg (op_mode, op1);
9755 op2 = expand_normal (range);
9757 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9758 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9759 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9760 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9762 op2 = copy_to_mode_reg (op_mode, op2);
9764 emit_jump_insn (gen_casesi (index, op1, op2,
9765 table_label, default_label));
9769 /* Attempt to generate a tablejump instruction; same concept. */
9770 #ifndef HAVE_tablejump
9771 #define HAVE_tablejump 0
9772 #define gen_tablejump(x, y) (0)
9775 /* Subroutine of the next function.
9777 INDEX is the value being switched on, with the lowest value
9778 in the table already subtracted.
9779 MODE is its expected mode (needed if INDEX is constant).
9780 RANGE is the length of the jump table.
9781 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9783 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9784 index value is out of range. */
9787 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9792 if (INTVAL (range) > cfun->max_jumptable_ents)
9793 cfun->max_jumptable_ents = INTVAL (range);
9795 /* Do an unsigned comparison (in the proper mode) between the index
9796 expression and the value which represents the length of the range.
9797 Since we just finished subtracting the lower bound of the range
9798 from the index expression, this comparison allows us to simultaneously
9799 check that the original index expression value is both greater than
9800 or equal to the minimum value of the range and less than or equal to
9801 the maximum value of the range. */
9803 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9806 /* If index is in range, it must fit in Pmode.
9807 Convert to Pmode so we can index with it. */
9809 index = convert_to_mode (Pmode, index, 1);
9811 /* Don't let a MEM slip through, because then INDEX that comes
9812 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9813 and break_out_memory_refs will go to work on it and mess it up. */
9814 #ifdef PIC_CASE_VECTOR_ADDRESS
9815 if (flag_pic && !REG_P (index))
9816 index = copy_to_mode_reg (Pmode, index);
9819 /* If flag_force_addr were to affect this address
9820 it could interfere with the tricky assumptions made
9821 about addresses that contain label-refs,
9822 which may be valid only very near the tablejump itself. */
9823 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9824 GET_MODE_SIZE, because this indicates how large insns are. The other
9825 uses should all be Pmode, because they are addresses. This code
9826 could fail if addresses and insns are not the same size. */
9827 index = gen_rtx_PLUS (Pmode,
9828 gen_rtx_MULT (Pmode, index,
9829 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9830 gen_rtx_LABEL_REF (Pmode, table_label));
9831 #ifdef PIC_CASE_VECTOR_ADDRESS
9833 index = PIC_CASE_VECTOR_ADDRESS (index);
9836 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9837 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9838 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9839 convert_move (temp, vector, 0);
9841 emit_jump_insn (gen_tablejump (temp, table_label));
9843 /* If we are generating PIC code or if the table is PC-relative, the
9844 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9845 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
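/* Added commentary: the dispatch above amounts to the computed jump

       goto *(CASE_VECTOR_MODE *) (&&table
				   + index * GET_MODE_SIZE (CASE_VECTOR_MODE));

   i.e. a scaled index into the jump table followed by an indirect
   jump, with PIC_CASE_VECTOR_ADDRESS applied first when generating
   PIC code.  */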
9850 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9851 rtx table_label, rtx default_label)
9855 if (! HAVE_tablejump)
9858 index_expr = fold_build2 (MINUS_EXPR, index_type,
9859 fold_convert (index_type, index_expr),
9860 fold_convert (index_type, minval));
9861 index = expand_normal (index_expr);
9862 do_pending_stack_adjust ();
9864 do_tablejump (index, TYPE_MODE (index_type),
9865 convert_modes (TYPE_MODE (index_type),
9866 TYPE_MODE (TREE_TYPE (range)),
9867 expand_normal (range),
9868 TYPE_UNSIGNED (TREE_TYPE (range))),
9869 table_label, default_label);
9873 /* Nonzero if the mode is a valid vector mode for this architecture.
9874 This returns nonzero even if there is no hardware support for the
9875 vector mode, but we can emulate with narrower modes. */
9878 vector_mode_valid_p (enum machine_mode mode)
9880 enum mode_class class = GET_MODE_CLASS (mode);
9881 enum machine_mode innermode;
9883 /* Doh! What's going on? */
9884 if (class != MODE_VECTOR_INT
9885 && class != MODE_VECTOR_FLOAT)
9888 /* Hardware support. Woo hoo! */
9889 if (targetm.vector_mode_supported_p (mode))
9892 innermode = GET_MODE_INNER (mode);
9894 /* We should probably return 1 if requesting V4DI and we have no DI
9895 but do have V2DI; that case, however, is probably very unlikely. */
9897 /* If we have support for the inner mode, we can safely emulate it.
9898 We may not have V2DI, but we can emulate with a pair of DIs. */
9899 return targetm.scalar_mode_supported_p (innermode);
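/* Added illustration (not from the original sources): emulating a
   vector mode with narrower scalar modes, e.g. V2DI as a pair of DIs
   when only DImode is supported.  */
#if 0
typedef long long v2di_emul[2];

static void
v2di_add_example (v2di_emul r, const v2di_emul a, const v2di_emul b)
{
  /* Without hardware V2DI support, a V2DI addition lowers to two
     independent DImode additions.  */
  r[0] = a[0] + b[0];
  r[1] = a[1] + b[1];
}
#endif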
9902 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9904 const_vector_from_tree (tree exp)
9909 enum machine_mode inner, mode;
9911 mode = TYPE_MODE (TREE_TYPE (exp));
9913 if (initializer_zerop (exp))
9914 return CONST0_RTX (mode);
9916 units = GET_MODE_NUNITS (mode);
9917 inner = GET_MODE_INNER (mode);
9919 v = rtvec_alloc (units);
9921 link = TREE_VECTOR_CST_ELTS (exp);
9922 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9924 elt = TREE_VALUE (link);
9926 if (TREE_CODE (elt) == REAL_CST)
9927 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9930 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9931 TREE_INT_CST_HIGH (elt),
9935 /* Initialize remaining elements to 0. */
9936 for (; i < units; ++i)
9937 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9939 return gen_rtx_CONST_VECTOR (mode, v);
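/* Added commentary: e.g. a 4-element integer VECTOR_CST that lists
   only the elements {1, 2} yields (const_vector [1 2 0 0]); the loop
   above zero-fills the missing trailing elements.  */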
9941 #include "gt-expr.h"