/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
   Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
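/* Illustration: with STACK_PUSH_CODE == PRE_DEC, pushing one word emits
   RTL of the shape (set (mem (pre_dec sp)) value); with PRE_INC the
   stack pointer is bumped upward instead.  Schematic only; the exact
   address form is target-dependent.  */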
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
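/* For example, float_extend_from_mem[DFmode][SFmode] becomes true when
   the target's SFmode-to-DFmode extension insn accepts a MEM source
   operand directly; init_expr_once below computes this by querying the
   insn's operand predicate.  (Illustrative mode pair.)  */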
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
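/* Worked example for these predicates (numbers hypothetical): with
   MOVE_MAX_PIECES == 8 and MOVE_RATIO == 8, a 16-byte aligned copy takes
   two 8-byte moves, so MOVE_BY_PIECES_P (16, 64) is true and the copy is
   open coded; a 256-byte copy would take 32 moves and fails the test, so
   a movmem pattern or libcall is preferred.  CLEAR_BY_PIECES_P and
   STORE_BY_PIECES_P apply the same counting with their own ratios.  */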
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
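/* For illustration: sync_compare_and_swap[(int) SImode] would hold the
   insn code of a target's 32-bit atomic compare-and-swap pattern, or
   CODE_FOR_nothing when there is none; the __sync_* builtin expanders
   consult these tables.  */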
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
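/* Example: on a strict-alignment target this default makes
   SLOW_UNALIGNED_ACCESS (SImode, 8) nonzero, which steers the by-pieces
   routines below toward narrower modes when the alignment is small.  */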
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
                   != GET_MODE_PRECISION (to_mode))
                  || (DECIMAL_FLOAT_MODE_P (from_mode)
                      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
        /* Conversion between decimal float and binary float, same size.  */
        tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      if (to_mode == full_mode)
        {
          emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                          to, from, UNKNOWN);
          return;
        }

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
#ifdef HAVE_slt
      else if (HAVE_slt
               && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
               && STORE_FLAG_VALUE == -1)
        {
          emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                         lowpart_mode, 0);
          fill_value = gen_reg_rtx (word_mode);
          emit_insn (gen_slt (fill_value));
        }
#endif
      else
        {
          fill_value
            = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                            size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                            NULL_RTX, 0);
          fill_value = convert_to_mode (word_mode, fill_value, 1);
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
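/* A typical use, sketched (names hypothetical): widen a QImode value to
   SImode with zero extension:

     rtx byte_reg = gen_reg_rtx (QImode);
     rtx wide = convert_to_mode (SImode, byte_reg, 1);

   Per the comment above, the result may reference part of BYTE_REG in
   place or be a freshly converted temporary.  */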
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
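/* Worked example (host-dependent): with a 64-bit HOST_WIDE_INT,
   2 * sizeof (HOST_WIDE_INT) is 16, so stores are limited to
   min (MOVE_MAX_PIECES, 16) bytes per piece.  */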
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
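/* Worked example (hypothetical target): l == 15 with word alignment and
   QI/HI/SI/DI moves available decomposes greedily as 15 = 8 + 4 + 2 + 1,
   i.e. one DImode, one SImode, one HImode and one QImode move, so the
   result is 4 insns.  */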
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
                                          method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
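  /* Note: tree_cons prepends, so building SIZE, then SRC, then DST above
     yields the argument list in memcpy order: (dst, src, size).  */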
  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
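/* Schematically, the loop emitted above is:

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];        (a single QImode byte)
       iter += 1;
     cmp:
       if (iter < size) goto top;
*/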
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
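/* Schematic example: a value passed in two hard registers,

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   is cloned into the same PARALLEL shape with fresh pseudos replacing
   regs 3 and 4, offsets preserved.  (Register numbers hypothetical.)  */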
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (COMPLEX_MODE_P (mode)
               && GET_MODE (src) == mode
               && bytelen == GET_MODE_SIZE (mode))
        /* Let emit_move_complex do the bulk of the work.  */
        tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, finish, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
  finish = XVECLEN (src, 0);

  tmps = alloca (sizeof (rtx) * finish);

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < finish; i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
        {
          tmps[i] = gen_reg_rtx (GET_MODE (reg));
          emit_move_insn (tmps[i], reg);
        }
      else
        tmps[i] = reg;
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      enum machine_mode outer = GET_MODE (dst);
      enum machine_mode inner;
      HOST_WIDE_INT bytepos;
      bool done = false;
      rtx temp;

      if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
        dst = gen_reg_rtx (outer);

      /* Make life a bit easier for combine.  */
      /* If the first element of the vector is the low part
         of the destination mode, use a paradoxical subreg to
         initialize the destination.  */
      if (start < finish)
        {
          inner = GET_MODE (tmps[start]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[start],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  start++;
                }
            }
        }

      /* If the first element wasn't the low part, try the last.  */
      if (!done
          && start < finish - 1)
        {
          inner = GET_MODE (tmps[finish - 1]);
          bytepos = subreg_lowpart_offset (inner, outer);
          if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
            {
              temp = simplify_gen_subreg (outer, tmps[finish - 1],
                                          inner, 0);
              if (temp)
                {
                  emit_move_insn (dst, temp);
                  done = true;
                  finish--;
                }
            }
        }

      /* Otherwise, simply initialize the result to zero.  */
      if (!done)
        emit_move_insn (dst, CONST0_RTX (outer));
    }

  /* Process the pieces.  */
  for (i = start; i < finish; i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
2047 /* Generate code to copy a BLKmode object of TYPE out of a
2048 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2049 is null, a stack temporary is created. TGTBLK is returned.
2051 The purpose of this routine is to handle functions that return
2052 BLKmode structures in registers. Some machines (the PA for example)
2053 want to return all small structures in registers regardless of the
2054 structure's alignment. */
2057 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2059 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2060 rtx src = NULL, dst = NULL;
2061 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2062 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2066 tgtblk = assign_temp (build_qualified_type (type,
2068 | TYPE_QUAL_CONST)),
2070 preserve_temp_slots (tgtblk);
2073 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2074 into a new pseudo which is a full word. */
2076 if (GET_MODE (srcreg) != BLKmode
2077 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2078 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2080 /* If the structure doesn't take up a whole number of words, see whether
2081 SRCREG is padded on the left or on the right. If it's on the left,
2082 set PADDING_CORRECTION to the number of bits to skip.
2084 In most ABIs, the structure will be returned at the least significant end of
2085 the register, which translates to right padding on little-endian
2086 targets and left padding on big-endian targets. The opposite
2087 holds if the structure is returned at the most significant
2088 end of the register. */
2089 if (bytes % UNITS_PER_WORD != 0
2090 && (targetm.calls.return_in_msb (type)
2092 : BYTES_BIG_ENDIAN))
2094 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
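/* Illustrative numbers: a 6-byte structure with 4-byte words on a
   big-endian target that returns it at the least significant end
   gives padding_correction == 32 - 2 * 8 == 16, so the copy loop
   below skips the first 16 bits of SRCREG.  */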
2096 /* Copy the structure BITSIZE bits at a time.
2098 We could probably emit more efficient code for machines which do not use
2099 strict alignment, but it doesn't seem worth the effort at the current time.  */
2101 for (bitpos = 0, xbitpos = padding_correction;
2102 bitpos < bytes * BITS_PER_UNIT;
2103 bitpos += bitsize, xbitpos += bitsize)
2105 /* We need a new source operand each time xbitpos is on a
2106 word boundary and when xbitpos == padding_correction
2107 (the first time through). */
2108 if (xbitpos % BITS_PER_WORD == 0
2109 || xbitpos == padding_correction)
2110 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2113 /* We need a new destination operand each time bitpos is on a word boundary.  */
2115 if (bitpos % BITS_PER_WORD == 0)
2116 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2118 /* Use xbitpos for the source extraction (right justified) and
2119 bitpos for the destination store (left justified).  */
2120 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2121 extract_bit_field (src, bitsize,
2122 xbitpos % BITS_PER_WORD, 1,
2123 NULL_RTX, word_mode, word_mode));
2129 /* Add a USE expression for REG to the (possibly empty) list pointed
2130 to by CALL_FUSAGE. REG must denote a hard register. */
2133 use_reg (rtx *call_fusage, rtx reg)
2135 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2138 = gen_rtx_EXPR_LIST (VOIDmode,
2139 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2142 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2143 starting at REGNO. All of these registers must be hard registers. */
2146 use_regs (rtx *call_fusage, int regno, int nregs)
2150 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2152 for (i = 0; i < nregs; i++)
2153 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2156 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2157 PARALLEL REGS. This is for calls that pass values in multiple
2158 non-contiguous locations. The Irix 6 ABI has examples of this. */
2161 use_group_regs (rtx *call_fusage, rtx regs)
2165 for (i = 0; i < XVECLEN (regs, 0); i++)
2167 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2169 /* A NULL entry means the parameter goes both on the stack and in
2170 registers. This can also be a MEM for targets that pass values
2171 partially on the stack and partially in registers. */
2172 if (reg != 0 && REG_P (reg))
2173 use_reg (call_fusage, reg);
2178 /* Determine whether the LEN bytes generated by CONSTFUN can be
2179 stored to memory using several move instructions. CONSTFUNDATA is
2180 a pointer which will be passed as argument in every CONSTFUN call.
2181 ALIGN is maximum alignment we can assume. Return nonzero if a
2182 call to store_by_pieces should succeed. */
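/* As an illustrative sketch (the helper below is assumed, not part of
   this file), a CONSTFUN producing 0xff-filled pieces could be:

     static rtx
     all_ones_piece (void *data ATTRIBUTE_UNUSED,
                     HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                     enum machine_mode mode)
     {
       return GEN_INT (trunc_int_for_mode (~(HOST_WIDE_INT) 0, mode));
     }

   clear_by_pieces_1 further below is the in-tree analogue; it returns
   const0_rtx for every piece.  */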
2185 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2186 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2187 void *constfundata, unsigned int align)
2189 unsigned HOST_WIDE_INT l;
2190 unsigned int max_size;
2191 HOST_WIDE_INT offset = 0;
2192 enum machine_mode mode, tmode;
2193 enum insn_code icode;
2200 if (! STORE_BY_PIECES_P (len, align))
2203 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2204 if (align >= GET_MODE_ALIGNMENT (tmode))
2205 align = GET_MODE_ALIGNMENT (tmode);
2208 enum machine_mode xmode;
2210 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2212 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2213 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2214 || SLOW_UNALIGNED_ACCESS (tmode, align))
2217 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2220 /* We would first store what we can in the largest integer mode, then go to
2221 successively smaller modes. */
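/* Illustrative decomposition, assuming STORE_MAX_PIECES == 8,
   adequate alignment and the non-reversed direction: len == 7 counts
   as one SImode, one HImode and one QImode store, with CONSTFUN
   queried at offsets 0, 4 and 6.  */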
2224 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2229 max_size = STORE_MAX_PIECES + 1;
2230 while (max_size > 1)
2232 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2233 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2234 if (GET_MODE_SIZE (tmode) < max_size)
2237 if (mode == VOIDmode)
2240 icode = mov_optab->handlers[(int) mode].insn_code;
2241 if (icode != CODE_FOR_nothing
2242 && align >= GET_MODE_ALIGNMENT (mode))
2244 unsigned int size = GET_MODE_SIZE (mode);
2251 cst = (*constfun) (constfundata, offset, mode);
2252 if (!LEGITIMATE_CONSTANT_P (cst))
2262 max_size = GET_MODE_SIZE (mode);
2265 /* The code above should have handled everything. */
2272 /* Generate several move instructions to store LEN bytes generated by
2273 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2274 pointer which will be passed as argument in every CONSTFUN call.
2275 ALIGN is maximum alignment we can assume.
2276 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2277 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy.  */
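/* For example (illustrative), a caller expanding mempcpy-like
   semantics would pass ENDP == 1 and use the returned rtx, which
   addresses the byte just past the last one stored.  */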
2281 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2282 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2283 void *constfundata, unsigned int align, int endp)
2285 struct store_by_pieces data;
2289 gcc_assert (endp != 2);
2293 gcc_assert (STORE_BY_PIECES_P (len, align));
2294 data.constfun = constfun;
2295 data.constfundata = constfundata;
2298 store_by_pieces_1 (&data, align);
2303 gcc_assert (!data.reverse);
2308 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2309 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2311 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2314 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2321 to1 = adjust_address (data.to, QImode, data.offset);
2329 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2330 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2333 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2335 struct store_by_pieces data;
2340 data.constfun = clear_by_pieces_1;
2341 data.constfundata = NULL;
2344 store_by_pieces_1 (&data, align);
2347 /* Callback routine for clear_by_pieces.
2348 Return const0_rtx unconditionally. */
2351 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2352 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2353 enum machine_mode mode ATTRIBUTE_UNUSED)
2358 /* Subroutine of clear_by_pieces and store_by_pieces.
2359 Generate several move instructions to store LEN bytes of block TO. (A MEM
2360 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2363 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2364 unsigned int align ATTRIBUTE_UNUSED)
2366 rtx to_addr = XEXP (data->to, 0);
2367 unsigned int max_size = STORE_MAX_PIECES + 1;
2368 enum machine_mode mode = VOIDmode, tmode;
2369 enum insn_code icode;
2372 data->to_addr = to_addr;
2374 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2375 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2377 data->explicit_inc_to = 0;
2379 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2381 data->offset = data->len;
2383 /* If storing requires more than two move insns,
2384 copy addresses to registers (to make displacements shorter)
2385 and use post-increment if available. */
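/* E.g. on a POST_INC target each piece is then stored through
   (mem (post_inc (reg))) rather than (mem (plus (reg) (const_int N)));
   an illustrative sketch of the address forms involved.  */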
2386 if (!data->autinc_to
2387 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2389 /* Determine the main mode we'll be using. */
2390 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2391 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2392 if (GET_MODE_SIZE (tmode) < max_size)
2395 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2397 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2398 data->autinc_to = 1;
2399 data->explicit_inc_to = -1;
2402 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2403 && ! data->autinc_to)
2405 data->to_addr = copy_addr_to_reg (to_addr);
2406 data->autinc_to = 1;
2407 data->explicit_inc_to = 1;
2410 if (!data->autinc_to && CONSTANT_P (to_addr))
2411 data->to_addr = copy_addr_to_reg (to_addr);
2414 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2415 if (align >= GET_MODE_ALIGNMENT (tmode))
2416 align = GET_MODE_ALIGNMENT (tmode);
2419 enum machine_mode xmode;
2421 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2423 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2424 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2425 || SLOW_UNALIGNED_ACCESS (tmode, align))
2428 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2431 /* First store what we can in the largest integer mode, then go to
2432 successively smaller modes. */
2434 while (max_size > 1)
2436 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2437 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2438 if (GET_MODE_SIZE (tmode) < max_size)
2441 if (mode == VOIDmode)
2444 icode = mov_optab->handlers[(int) mode].insn_code;
2445 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2446 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2448 max_size = GET_MODE_SIZE (mode);
2451 /* The code above should have handled everything. */
2452 gcc_assert (!data->len);
2455 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2456 with move instructions for mode MODE. GENFUN is the gen_... function
2457 to make a move insn for that mode. DATA has all the other info. */
2460 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2461 struct store_by_pieces *data)
2463 unsigned int size = GET_MODE_SIZE (mode);
2466 while (data->len >= size)
2469 data->offset -= size;
2471 if (data->autinc_to)
2472 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2475 to1 = adjust_address (data->to, mode, data->offset);
2477 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2478 emit_insn (gen_add2_insn (data->to_addr,
2479 GEN_INT (-(HOST_WIDE_INT) size)));
2481 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2482 emit_insn ((*genfun) (to1, cst));
2484 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2485 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2487 if (! data->reverse)
2488 data->offset += size;
2494 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2495 its length in bytes. */
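/* A minimal illustrative use with assumed values: to zero a 24-byte
   stack temporary one could write

     rtx tmp = assign_stack_temp (BLKmode, 24, 0);
     clear_storage (tmp, GEN_INT (24), BLOCK_OP_NORMAL);

   A small constant SIZE typically expands via clear_by_pieces;
   otherwise a setmem pattern is tried, with the memset libcall as the
   fallback.  */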
2498 clear_storage (rtx object, rtx size, enum block_op_methods method)
2500 enum machine_mode mode = GET_MODE (object);
2503 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2505 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2506 just move a zero. Otherwise, do this a piece at a time. */
2508 && GET_CODE (size) == CONST_INT
2509 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2511 rtx zero = CONST0_RTX (mode);
2514 emit_move_insn (object, zero);
2518 if (COMPLEX_MODE_P (mode))
2520 zero = CONST0_RTX (GET_MODE_INNER (mode));
2523 write_complex_part (object, zero, 0);
2524 write_complex_part (object, zero, 1);
2530 if (size == const0_rtx)
2533 align = MEM_ALIGN (object);
2535 if (GET_CODE (size) == CONST_INT
2536 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2537 clear_by_pieces (object, INTVAL (size), align);
2538 else if (set_storage_via_setmem (object, size, const0_rtx, align))
2541 return set_storage_via_libcall (object, size, const0_rtx,
2542 method == BLOCK_OP_TAILCALL);
2547 /* A subroutine of clear_storage. Expand a call to memset.
2548 Return the return value of memset, 0 otherwise. */
2551 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2553 tree call_expr, arg_list, fn, object_tree, size_tree, val_tree;
2554 enum machine_mode size_mode;
2557 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2558 place those pseudos into a VAR_DECL and use them later.  */
2560 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2562 size_mode = TYPE_MODE (sizetype);
2563 size = convert_to_mode (size_mode, size, 1);
2564 size = copy_to_mode_reg (size_mode, size);
2566 /* It is incorrect to use the libcall calling conventions to call
2567 memset in this context. This could be a user call to memset and
2568 the user may wish to examine the return value from memset. For
2569 targets where libcalls and normal calls have different conventions
2570 for returning pointers, we could end up generating incorrect code. */
2572 object_tree = make_tree (ptr_type_node, object);
2573 if (GET_CODE (val) != CONST_INT)
2574 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2575 size_tree = make_tree (sizetype, size);
2576 val_tree = make_tree (integer_type_node, val);
2578 fn = clear_storage_libcall_fn (true);
2579 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2580 arg_list = tree_cons (NULL_TREE, val_tree, arg_list);
2581 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2583 /* Now we have to build up the CALL_EXPR itself. */
2584 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2585 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2586 call_expr, arg_list, NULL_TREE);
2587 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2589 retval = expand_normal (call_expr);
2594 /* A subroutine of set_storage_via_libcall. Create the tree node
2595 for the function we use for block clears. The first time FOR_CALL
2596 is true, we call assemble_external. */
2598 static GTY(()) tree block_clear_fn;
2601 init_block_clear_fn (const char *asmspec)
2603 if (!block_clear_fn)
2607 fn = get_identifier ("memset");
2608 args = build_function_type_list (ptr_type_node, ptr_type_node,
2609 integer_type_node, sizetype,
2612 fn = build_decl (FUNCTION_DECL, fn, args);
2613 DECL_EXTERNAL (fn) = 1;
2614 TREE_PUBLIC (fn) = 1;
2615 DECL_ARTIFICIAL (fn) = 1;
2616 TREE_NOTHROW (fn) = 1;
2617 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2618 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2620 block_clear_fn = fn;
2624 set_user_assembler_name (block_clear_fn, asmspec);
2628 clear_storage_libcall_fn (int for_call)
2630 static bool emitted_extern;
2632 if (!block_clear_fn)
2633 init_block_clear_fn (NULL);
2635 if (for_call && !emitted_extern)
2637 emitted_extern = true;
2638 make_decl_rtl (block_clear_fn);
2639 assemble_external (block_clear_fn);
2642 return block_clear_fn;
2645 /* Expand a setmem pattern; return true if successful. */
2648 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align)
2650 /* Try the most limited insn first, because there's no point
2651 including more than one in the machine description unless
2652 the more limited one has some advantage. */
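/* Operand layout relied on by the checks below: operand 0 is the
   destination BLKmode MEM, 1 the byte count, 2 the fill value and
   3 the known alignment in bytes.  */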
2654 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2655 enum machine_mode mode;
2657 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2658 mode = GET_MODE_WIDER_MODE (mode))
2660 enum insn_code code = setmem_optab[(int) mode];
2661 insn_operand_predicate_fn pred;
2663 if (code != CODE_FOR_nothing
2664 /* We don't need MODE to be narrower than
2665 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2666 the mode mask, as it is returned by the macro, it will
2667 definitely be less than the actual mode mask. */
2668 && ((GET_CODE (size) == CONST_INT
2669 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2670 <= (GET_MODE_MASK (mode) >> 1)))
2671 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2672 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2673 || (*pred) (object, BLKmode))
2674 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2675 || (*pred) (opalign, VOIDmode)))
2678 enum machine_mode char_mode;
2679 rtx last = get_last_insn ();
2682 opsize = convert_to_mode (mode, size, 1);
2683 pred = insn_data[(int) code].operand[1].predicate;
2684 if (pred != 0 && ! (*pred) (opsize, mode))
2685 opsize = copy_to_mode_reg (mode, opsize);
2688 char_mode = insn_data[(int) code].operand[2].mode;
2689 if (char_mode != VOIDmode)
2691 opchar = convert_to_mode (char_mode, opchar, 1);
2692 pred = insn_data[(int) code].operand[2].predicate;
2693 if (pred != 0 && ! (*pred) (opchar, char_mode))
2694 opchar = copy_to_mode_reg (char_mode, opchar);
2697 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2704 delete_insns_since (last);
2712 /* Write to one of the components of the complex value CPLX. Write VAL to
2713 the real part if IMAG_P is false, and the imaginary part if it's true.  */
2716 write_complex_part (rtx cplx, rtx val, bool imag_p)
2718 enum machine_mode cmode;
2719 enum machine_mode imode;
2722 if (GET_CODE (cplx) == CONCAT)
2724 emit_move_insn (XEXP (cplx, imag_p), val);
2728 cmode = GET_MODE (cplx);
2729 imode = GET_MODE_INNER (cmode);
2730 ibitsize = GET_MODE_BITSIZE (imode);
2732 /* For MEMs simplify_gen_subreg may generate an invalid new address
2733 because, e.g., the original address is considered mode-dependent
2734 by the target, which restricts simplify_subreg from invoking
2735 adjust_address_nv. Instead of preparing fallback support for an
2736 invalid address, we call adjust_address_nv directly. */
2739 emit_move_insn (adjust_address_nv (cplx, imode,
2740 imag_p ? GET_MODE_SIZE (imode) : 0),
2745 /* If the sub-object is at least word sized, then we know that subregging
2746 will work. This special case is important, since store_bit_field
2747 wants to operate on integer modes, and there's rarely an OImode to
2748 correspond to TCmode. */
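/* E.g. (illustrative) writing the imaginary half of a DCmode pseudo
   becomes a DFmode subreg store at byte offset GET_MODE_SIZE (DFmode),
   i.e. 8.  */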
2749 if (ibitsize >= BITS_PER_WORD
2750 /* For hard regs we have exact predicates. Assume we can split
2751 the original object if it spans an even number of hard regs.
2752 This special case is important for SCmode on 64-bit platforms
2753 where the natural size of floating-point regs is 32-bit. */
2755 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2756 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2758 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2759 imag_p ? GET_MODE_SIZE (imode) : 0);
2762 emit_move_insn (part, val);
2766 /* simplify_gen_subreg may fail for sub-word MEMs. */
2767 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2770 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2773 /* Extract one of the components of the complex value CPLX. Extract the
2774 real part if IMAG_P is false, and the imaginary part if it's true. */
2777 read_complex_part (rtx cplx, bool imag_p)
2779 enum machine_mode cmode, imode;
2782 if (GET_CODE (cplx) == CONCAT)
2783 return XEXP (cplx, imag_p);
2785 cmode = GET_MODE (cplx);
2786 imode = GET_MODE_INNER (cmode);
2787 ibitsize = GET_MODE_BITSIZE (imode);
2789 /* Special case reads from complex constants that got spilled to memory. */
2790 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2792 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2793 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2795 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2796 if (CONSTANT_CLASS_P (part))
2797 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2801 /* For MEMs simplify_gen_subreg may generate an invalid new address
2802 because, e.g., the original address is considered mode-dependent
2803 by the target, which restricts simplify_subreg from invoking
2804 adjust_address_nv. Instead of preparing fallback support for an
2805 invalid address, we call adjust_address_nv directly. */
2807 return adjust_address_nv (cplx, imode,
2808 imag_p ? GET_MODE_SIZE (imode) : 0);
2810 /* If the sub-object is at least word sized, then we know that subregging
2811 will work. This special case is important, since extract_bit_field
2812 wants to operate on integer modes, and there's rarely an OImode to
2813 correspond to TCmode. */
2814 if (ibitsize >= BITS_PER_WORD
2815 /* For hard regs we have exact predicates. Assume we can split
2816 the original object if it spans an even number of hard regs.
2817 This special case is important for SCmode on 64-bit platforms
2818 where the natural size of floating-point regs is 32-bit. */
2820 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2821 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2823 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2824 imag_p ? GET_MODE_SIZE (imode) : 0);
2828 /* simplify_gen_subreg may fail for sub-word MEMs. */
2829 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2832 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2833 true, NULL_RTX, imode, imode);
2836 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2837 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2838 represented in NEW_MODE. If FORCE is true, this will never happen, as
2839 we'll force-create a SUBREG if needed. */
2842 emit_move_change_mode (enum machine_mode new_mode,
2843 enum machine_mode old_mode, rtx x, bool force)
2849 /* We don't have to worry about changing the address since the
2850 size in bytes is supposed to be the same. */
2851 if (reload_in_progress)
2853 /* Copy the MEM to change the mode and move any
2854 substitutions from the old MEM to the new one. */
2855 ret = adjust_address_nv (x, new_mode, 0);
2856 copy_replacements (x, ret);
2859 ret = adjust_address (x, new_mode, 0);
2863 /* Note that we do want simplify_subreg's behavior of validating
2864 that the new mode is ok for a hard register. If we were to use
2865 simplify_gen_subreg, we would create the subreg, but would
2866 probably run into the target not being able to implement it. */
2867 /* Except, of course, when FORCE is true, when this is exactly what
2868 we want. Which is needed for CCmodes on some targets. */
2870 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2872 ret = simplify_subreg (new_mode, x, old_mode, 0);
2878 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2879 an integer mode of the same size as MODE. Returns the instruction
2880 emitted, or NULL if such a move could not be generated. */
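/* For instance (illustrative), an SFmode move on a target without a
   movsf pattern can be emitted as a movsi of the same four bytes,
   since int_mode_for_mode (SFmode) is SImode.  */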
2883 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2885 enum machine_mode imode;
2886 enum insn_code code;
2888 /* There must exist a mode of the exact size we require. */
2889 imode = int_mode_for_mode (mode);
2890 if (imode == BLKmode)
2893 /* The target must support moves in this mode. */
2894 code = mov_optab->handlers[imode].insn_code;
2895 if (code == CODE_FOR_nothing)
2898 x = emit_move_change_mode (imode, mode, x, force);
2901 y = emit_move_change_mode (imode, mode, y, force);
2904 return emit_insn (GEN_FCN (code) (x, y));
2907 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2908 Return an equivalent MEM that does not use an auto-increment. */
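/* E.g. (illustrative, assuming PUSH_ROUNDING (8) == 8): a
   (mem:DI (pre_dec (reg sp))) push is rewritten as an explicit 8-byte
   stack adjustment followed by a store to (mem:DI (reg sp)), leaving
   the net stack change identical.  */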
2911 emit_move_resolve_push (enum machine_mode mode, rtx x)
2913 enum rtx_code code = GET_CODE (XEXP (x, 0));
2914 HOST_WIDE_INT adjust;
2917 adjust = GET_MODE_SIZE (mode);
2918 #ifdef PUSH_ROUNDING
2919 adjust = PUSH_ROUNDING (adjust);
2921 if (code == PRE_DEC || code == POST_DEC)
2923 else if (code == PRE_MODIFY || code == POST_MODIFY)
2925 rtx expr = XEXP (XEXP (x, 0), 1);
2928 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
2929 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
2930 val = INTVAL (XEXP (expr, 1));
2931 if (GET_CODE (expr) == MINUS)
2933 gcc_assert (adjust == val || adjust == -val);
2937 /* Do not use anti_adjust_stack, since we don't want to update
2938 stack_pointer_delta. */
2939 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2940 GEN_INT (adjust), stack_pointer_rtx,
2941 0, OPTAB_LIB_WIDEN);
2942 if (temp != stack_pointer_rtx)
2943 emit_move_insn (stack_pointer_rtx, temp);
2950 temp = stack_pointer_rtx;
2955 temp = plus_constant (stack_pointer_rtx, -adjust);
2961 return replace_equiv_address (x, temp);
2964 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2965 X is known to satisfy push_operand, and MODE is known to be complex.
2966 Returns the last instruction emitted. */
2969 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2971 enum machine_mode submode = GET_MODE_INNER (mode);
2974 #ifdef PUSH_ROUNDING
2975 unsigned int submodesize = GET_MODE_SIZE (submode);
2977 /* In case we output to the stack, but the size is smaller than the
2978 machine can push exactly, we need to use move instructions. */
2979 if (PUSH_ROUNDING (submodesize) != submodesize)
2981 x = emit_move_resolve_push (mode, x);
2982 return emit_move_insn (x, y);
2986 /* Note that the real part always precedes the imag part in memory
2987 regardless of machine's endianness. */
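/* Consequently, on a pre/post-decrement stack the imaginary half is
   pushed first so that it still ends up at the higher address; the
   switch below picks that order (an illustrative note).  */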
2988 switch (GET_CODE (XEXP (x, 0)))
3002 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3003 read_complex_part (y, imag_first));
3004 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3005 read_complex_part (y, !imag_first));
3008 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3009 MODE is known to be complex. Returns the last instruction emitted. */
3012 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3016 /* Need to take special care for pushes, to maintain proper ordering
3017 of the data, and possibly extra padding. */
3018 if (push_operand (x, mode))
3019 return emit_move_complex_push (mode, x, y);
3021 /* See if we can coerce the target into moving both values at once. */
3023 /* Move floating point as parts. */
3024 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3025 && mov_optab->handlers[GET_MODE_INNER (mode)].insn_code != CODE_FOR_nothing)
3027 /* Not possible if the values are inherently not adjacent. */
3028 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3030 /* Is possible if both are registers (or subregs of registers). */
3031 else if (register_operand (x, mode) && register_operand (y, mode))
3033 /* If one of the operands is a memory, and alignment constraints
3034 are friendly enough, we may be able to do combined memory operations.
3035 We do not attempt this if Y is a constant because that combination is
3036 usually better with the by-parts thing below. */
3037 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3038 && (!STRICT_ALIGNMENT
3039 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3048 /* For memory to memory moves, optimal behavior can be had with the
3049 existing block move logic. */
3050 if (MEM_P (x) && MEM_P (y))
3052 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3053 BLOCK_OP_NO_LIBCALL);
3054 return get_last_insn ();
3057 ret = emit_move_via_integer (mode, x, y, true);
3062 /* Show the output dies here. This is necessary for SUBREGs
3063 of pseudos since we cannot track their lifetimes correctly;
3064 hard regs shouldn't appear here except as return values. */
3065 if (!reload_completed && !reload_in_progress
3066 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3067 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3069 write_complex_part (x, read_complex_part (y, false), false);
3070 write_complex_part (x, read_complex_part (y, true), true);
3071 return get_last_insn ();
3074 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3075 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3078 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3082 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3085 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
3086 if (code != CODE_FOR_nothing)
3088 x = emit_move_change_mode (CCmode, mode, x, true);
3089 y = emit_move_change_mode (CCmode, mode, y, true);
3090 return emit_insn (GEN_FCN (code) (x, y));
3094 /* Otherwise, find the MODE_INT mode of the same width. */
3095 ret = emit_move_via_integer (mode, x, y, false);
3096 gcc_assert (ret != NULL);
3100 /* Return true if word I of OP lies entirely in the
3101 undefined bits of a paradoxical subreg. */
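/* Illustrative case: for (subreg:TI (reg:DI x) 0) on a 32-bit
   little-endian target, words 2 and 3 lie wholly beyond the eight
   bytes of the DImode source, so the test below is true for them.  */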
3104 undefined_operand_subword_p (rtx op, int i)
3106 enum machine_mode innermode, innermostmode;
3108 if (GET_CODE (op) != SUBREG)
3110 innermode = GET_MODE (op);
3111 innermostmode = GET_MODE (SUBREG_REG (op));
3112 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3113 /* The SUBREG_BYTE represents offset, as if the value were stored in
3114 memory, except for a paradoxical subreg where we define
3115 SUBREG_BYTE to be 0; undo this exception as in simplify_subreg.  */
3117 if (SUBREG_BYTE (op) == 0
3118 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3120 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3121 if (WORDS_BIG_ENDIAN)
3122 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3123 if (BYTES_BIG_ENDIAN)
3124 offset += difference % UNITS_PER_WORD;
3126 if (offset >= GET_MODE_SIZE (innermostmode)
3127 || offset <= -GET_MODE_SIZE (word_mode))
3132 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3133 MODE is any multi-word or full-word mode that lacks a move_insn
3134 pattern. Note that you will get better code if you define such
3135 patterns, even if they must turn into multiple assembler instructions. */
3138 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3145 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3147 /* If X is a push on the stack, do the push now and replace
3148 X with a reference to the stack pointer. */
3149 if (push_operand (x, mode))
3150 x = emit_move_resolve_push (mode, x);
3152 /* If we are in reload, see if either operand is a MEM whose address
3153 is scheduled for replacement. */
3154 if (reload_in_progress && MEM_P (x)
3155 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3156 x = replace_equiv_address_nv (x, inner);
3157 if (reload_in_progress && MEM_P (y)
3158 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3159 y = replace_equiv_address_nv (y, inner);
3163 need_clobber = false;
3165 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3168 rtx xpart = operand_subword (x, i, 1, mode);
3171 /* Do not generate code for a move if it would come entirely
3172 from the undefined bits of a paradoxical subreg. */
3173 if (undefined_operand_subword_p (y, i))
3176 ypart = operand_subword (y, i, 1, mode);
3178 /* If we can't get a part of Y, put Y into memory if it is a
3179 constant. Otherwise, force it into a register. Then we must
3180 be able to get a part of Y. */
3181 if (ypart == 0 && CONSTANT_P (y))
3183 y = use_anchored_address (force_const_mem (mode, y));
3184 ypart = operand_subword (y, i, 1, mode);
3186 else if (ypart == 0)
3187 ypart = operand_subword_force (y, i, mode);
3189 gcc_assert (xpart && ypart);
3191 need_clobber |= (GET_CODE (xpart) == SUBREG);
3193 last_insn = emit_move_insn (xpart, ypart);
3199 /* Show the output dies here. This is necessary for SUBREGs
3200 of pseudos since we cannot track their lifetimes correctly;
3201 hard regs shouldn't appear here except as return values.
3202 We never want to emit such a clobber after reload. */
3204 && ! (reload_in_progress || reload_completed)
3205 && need_clobber != 0)
3206 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3213 /* Low level part of emit_move_insn.
3214 Called just like emit_move_insn, but assumes X and Y
3215 are basically valid. */
3218 emit_move_insn_1 (rtx x, rtx y)
3220 enum machine_mode mode = GET_MODE (x);
3221 enum insn_code code;
3223 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3225 code = mov_optab->handlers[mode].insn_code;
3226 if (code != CODE_FOR_nothing)
3227 return emit_insn (GEN_FCN (code) (x, y));
3229 /* Expand complex moves by moving real part and imag part. */
3230 if (COMPLEX_MODE_P (mode))
3231 return emit_move_complex (mode, x, y);
3233 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT)
3235 rtx result = emit_move_via_integer (mode, x, y, true);
3237 /* If we can't find an integer mode, use multi words. */
3241 return emit_move_multi_word (mode, x, y);
3244 if (GET_MODE_CLASS (mode) == MODE_CC)
3245 return emit_move_ccmode (mode, x, y);
3247 /* Try using a move pattern for the corresponding integer mode. This is
3248 only safe when simplify_subreg can convert MODE constants into integer
3249 constants. At present, it can only do this reliably if the value
3250 fits within a HOST_WIDE_INT. */
3251 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3253 rtx ret = emit_move_via_integer (mode, x, y, false);
3258 return emit_move_multi_word (mode, x, y);
3261 /* Generate code to copy Y into X.
3262 Both Y and X must have the same mode, except that
3263 Y can be a constant with VOIDmode.
3264 This mode cannot be BLKmode; use emit_block_move for that.
3266 Return the last instruction emitted. */
3269 emit_move_insn (rtx x, rtx y)
3271 enum machine_mode mode = GET_MODE (x);
3272 rtx y_cst = NULL_RTX;
3275 gcc_assert (mode != BLKmode
3276 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3281 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3282 && (last_insn = compress_float_constant (x, y)))
3287 if (!LEGITIMATE_CONSTANT_P (y))
3289 y = force_const_mem (mode, y);
3291 /* If the target's cannot_force_const_mem prevented the spill,
3292 assume that the target's move expanders will also take care
3293 of the non-legitimate constant. */
3297 y = use_anchored_address (y);
3301 /* If X or Y are memory references, verify that their addresses are valid anyway.  */
3304 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3305 && ! push_operand (x, GET_MODE (x)))
3307 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3308 x = validize_mem (x);
3311 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3313 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3314 y = validize_mem (y);
3316 gcc_assert (mode != BLKmode);
3318 last_insn = emit_move_insn_1 (x, y);
3320 if (y_cst && REG_P (x)
3321 && (set = single_set (last_insn)) != NULL_RTX
3322 && SET_DEST (set) == x
3323 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3324 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3329 /* If Y is representable exactly in a narrower mode, and the target can
3330 perform the extension directly from constant or memory, then emit the
3331 move as an extension. */
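/* For example (illustrative): a DFmode constant 1.0 truncates exactly
   to SFmode, so on a target with a suitable extend pattern the move
   can load the four-byte SF image and extend it, when that is cheaper
   than a DFmode constant pool reference.  */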
3334 compress_float_constant (rtx x, rtx y)
3336 enum machine_mode dstmode = GET_MODE (x);
3337 enum machine_mode orig_srcmode = GET_MODE (y);
3338 enum machine_mode srcmode;
3340 int oldcost, newcost;
3342 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3344 if (LEGITIMATE_CONSTANT_P (y))
3345 oldcost = rtx_cost (y, SET);
3347 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3349 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3350 srcmode != orig_srcmode;
3351 srcmode = GET_MODE_WIDER_MODE (srcmode))
3354 rtx trunc_y, last_insn;
3356 /* Skip if the target can't extend this way. */
3357 ic = can_extend_p (dstmode, srcmode, 0);
3358 if (ic == CODE_FOR_nothing)
3361 /* Skip if the narrowed value isn't exact. */
3362 if (! exact_real_truncate (srcmode, &r))
3365 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3367 if (LEGITIMATE_CONSTANT_P (trunc_y))
3369 /* Skip if the target needs extra instructions to perform the extension.  */
3371 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3373 /* This is valid, but may not be cheaper than the original. */
3374 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3375 if (oldcost < newcost)
3378 else if (float_extend_from_mem[dstmode][srcmode])
3380 trunc_y = force_const_mem (srcmode, trunc_y);
3381 /* This is valid, but may not be cheaper than the original. */
3382 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3383 if (oldcost < newcost)
3385 trunc_y = validize_mem (trunc_y);
3390 /* For CSE's benefit, force the compressed constant pool entry
3391 into a new pseudo. This constant may be used in different modes,
3392 and if not, combine will put things back together for us. */
3393 trunc_y = force_reg (srcmode, trunc_y);
3394 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3395 last_insn = get_last_insn ();
3398 set_unique_reg_note (last_insn, REG_EQUAL, y);
3406 /* Pushing data onto the stack. */
3408 /* Push a block of length SIZE (perhaps variable)
3409 and return an rtx to address the beginning of the block.
3410 The value may be virtual_outgoing_args_rtx.
3412 EXTRA is the number of bytes of padding to push in addition to SIZE.
3413 BELOW nonzero means this padding comes at low addresses;
3414 otherwise, the padding comes at high addresses. */
3417 push_block (rtx size, int extra, int below)
3421 size = convert_modes (Pmode, ptr_mode, size, 1);
3422 if (CONSTANT_P (size))
3423 anti_adjust_stack (plus_constant (size, extra));
3424 else if (REG_P (size) && extra == 0)
3425 anti_adjust_stack (size);
3428 temp = copy_to_mode_reg (Pmode, size);
3430 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3431 temp, 0, OPTAB_LIB_WIDEN);
3432 anti_adjust_stack (temp);
3435 #ifndef STACK_GROWS_DOWNWARD
3441 temp = virtual_outgoing_args_rtx;
3442 if (extra != 0 && below)
3443 temp = plus_constant (temp, extra);
3447 if (GET_CODE (size) == CONST_INT)
3448 temp = plus_constant (virtual_outgoing_args_rtx,
3449 -INTVAL (size) - (below ? 0 : extra));
3450 else if (extra != 0 && !below)
3451 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3452 negate_rtx (Pmode, plus_constant (size, extra)));
3454 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3455 negate_rtx (Pmode, size));
3458 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3461 #ifdef PUSH_ROUNDING
3463 /* Emit single push insn. */
3466 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3469 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3471 enum insn_code icode;
3472 insn_operand_predicate_fn pred;
3474 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3475 /* If there is a push pattern, use it. Otherwise try the old way of throwing a
3476 MEM representing the push operation to the move expander. */
3477 icode = push_optab->handlers[(int) mode].insn_code;
3478 if (icode != CODE_FOR_nothing)
3480 if (((pred = insn_data[(int) icode].operand[0].predicate)
3481 && !((*pred) (x, mode))))
3482 x = force_reg (mode, x);
3483 emit_insn (GEN_FCN (icode) (x));
3486 if (GET_MODE_SIZE (mode) == rounded_size)
3487 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3488 /* If we are to pad downward, adjust the stack pointer first and
3489 then store X into the stack location using an offset. This is
3490 because emit_move_insn does not know how to pad; it does not have access to TYPE.  */
3492 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3494 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3495 HOST_WIDE_INT offset;
3497 emit_move_insn (stack_pointer_rtx,
3498 expand_binop (Pmode,
3499 #ifdef STACK_GROWS_DOWNWARD
3505 GEN_INT (rounded_size),
3506 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3508 offset = (HOST_WIDE_INT) padding_size;
3509 #ifdef STACK_GROWS_DOWNWARD
3510 if (STACK_PUSH_CODE == POST_DEC)
3511 /* We have already decremented the stack pointer, so get the previous value.  */
3513 offset += (HOST_WIDE_INT) rounded_size;
3515 if (STACK_PUSH_CODE == POST_INC)
3516 /* We have already incremented the stack pointer, so get the previous value.  */
3518 offset -= (HOST_WIDE_INT) rounded_size;
3520 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3524 #ifdef STACK_GROWS_DOWNWARD
3525 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3526 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3527 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3529 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3530 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3531 GEN_INT (rounded_size));
3533 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3536 dest = gen_rtx_MEM (mode, dest_addr);
3540 set_mem_attributes (dest, type, 1);
3542 if (flag_optimize_sibling_calls)
3543 /* Function incoming arguments may overlap with sibling call
3544 outgoing arguments and we cannot allow reordering of reads
3545 from function arguments with stores to outgoing arguments
3546 of sibling calls. */
3547 set_mem_alias_set (dest, 0);
3549 emit_move_insn (dest, x);
3553 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3555 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3557 SIZE is an rtx for the size of data to be copied (in bytes),
3558 needed only if X is BLKmode.
3560 ALIGN (in bits) is maximum alignment we can assume.
3562 If PARTIAL and REG are both nonzero, then copy that many of the first
3563 bytes of X into registers starting with REG, and push the rest of X.
3564 The amount of space pushed is decreased by PARTIAL bytes.
3565 REG must be a hard register in this case.
3566 If REG is zero but PARTIAL is not, take all other actions for an
3567 argument partially in registers, but do not actually load any registers.
3570 EXTRA is the amount in bytes of extra space to leave next to this arg.
3571 This is ignored if an argument block has already been allocated.
3573 On a machine that lacks real push insns, ARGS_ADDR is the address of
3574 the bottom of the argument block for this call. We use indexing off there
3575 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3576 argument block has not been preallocated.
3578 ARGS_SO_FAR is the size of args previously pushed for this call.
3580 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3581 for arguments passed in registers. If nonzero, it will be the number
3582 of bytes required. */
3585 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3586 unsigned int align, int partial, rtx reg, int extra,
3587 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3591 enum direction stack_direction
3592 #ifdef STACK_GROWS_DOWNWARD
3598 /* Decide where to pad the argument: `downward' for below,
3599 `upward' for above, or `none' for don't pad it.
3600 Default is below for small data on big-endian machines; else above. */
3601 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3603 /* Invert direction if stack is post-decrement.
3605 if (STACK_PUSH_CODE == POST_DEC)
3606 if (where_pad != none)
3607 where_pad = (where_pad == downward ? upward : downward);
3611 if (mode == BLKmode)
3613 /* Copy a block into the stack, entirely or partially. */
3620 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3621 used = partial - offset;
3625 /* USED is now the # of bytes we need not copy to the stack
3626 because registers will take care of them. */
3629 xinner = adjust_address (xinner, BLKmode, used);
3631 /* If the partial register-part of the arg counts in its stack size,
3632 skip the part of stack space corresponding to the registers.
3633 Otherwise, start copying to the beginning of the stack space,
3634 by setting SKIP to 0. */
3635 skip = (reg_parm_stack_space == 0) ? 0 : used;
3637 #ifdef PUSH_ROUNDING
3638 /* Do it with several push insns if that doesn't take lots of insns
3639 and if there is no difficulty with push insns that skip bytes
3640 on the stack for alignment purposes. */
3643 && GET_CODE (size) == CONST_INT
3645 && MEM_ALIGN (xinner) >= align
3646 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3647 /* Here we avoid the case of a structure whose weak alignment
3648 forces many pushes of a small amount of data,
3649 and such small pushes do rounding that causes trouble. */
3650 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3651 || align >= BIGGEST_ALIGNMENT
3652 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3653 == (align / BITS_PER_UNIT)))
3654 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3656 /* Push padding now if padding above and stack grows down,
3657 or if padding below and stack grows up.
3658 But if space already allocated, this has already been done. */
3659 if (extra && args_addr == 0
3660 && where_pad != none && where_pad != stack_direction)
3661 anti_adjust_stack (GEN_INT (extra));
3663 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3666 #endif /* PUSH_ROUNDING */
3670 /* Otherwise make space on the stack and copy the data
3671 to the address of that space. */
3673 /* Deduct words put into registers from the size we must copy. */
3676 if (GET_CODE (size) == CONST_INT)
3677 size = GEN_INT (INTVAL (size) - used);
3679 size = expand_binop (GET_MODE (size), sub_optab, size,
3680 GEN_INT (used), NULL_RTX, 0,
3684 /* Get the address of the stack space.
3685 In this case, we do not deal with EXTRA separately.
3686 A single stack adjust will do. */
3689 temp = push_block (size, extra, where_pad == downward);
3692 else if (GET_CODE (args_so_far) == CONST_INT)
3693 temp = memory_address (BLKmode,
3694 plus_constant (args_addr,
3695 skip + INTVAL (args_so_far)));
3697 temp = memory_address (BLKmode,
3698 plus_constant (gen_rtx_PLUS (Pmode,
3703 if (!ACCUMULATE_OUTGOING_ARGS)
3705 /* If the source is referenced relative to the stack pointer,
3706 copy it to another register to stabilize it. We do not need
3707 to do this if we know that we won't be changing sp. */
3709 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3710 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3711 temp = copy_to_reg (temp);
3714 target = gen_rtx_MEM (BLKmode, temp);
3716 /* We do *not* set_mem_attributes here, because incoming arguments
3717 may overlap with sibling call outgoing arguments and we cannot
3718 allow reordering of reads from function arguments with stores
3719 to outgoing arguments of sibling calls. We do, however, want
3720 to record the alignment of the stack slot. */
3721 /* ALIGN may well be better aligned than TYPE, e.g. due to
3722 PARM_BOUNDARY. Assume the caller isn't lying. */
3723 set_mem_align (target, align);
3725 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3728 else if (partial > 0)
3730 /* Scalar partly in registers. */
3732 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3735 /* # bytes of start of argument
3736 that we must make space for but need not store. */
3737 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3738 int args_offset = INTVAL (args_so_far);
3741 /* Push padding now if padding above and stack grows down,
3742 or if padding below and stack grows up.
3743 But if space already allocated, this has already been done. */
3744 if (extra && args_addr == 0
3745 && where_pad != none && where_pad != stack_direction)
3746 anti_adjust_stack (GEN_INT (extra));
3748 /* If we make space by pushing it, we might as well push
3749 the real data. Otherwise, we can leave OFFSET nonzero
3750 and leave the space uninitialized. */
3754 /* Now NOT_STACK gets the number of words that we don't need to
3755 allocate on the stack. Convert OFFSET to words too. */
3756 not_stack = (partial - offset) / UNITS_PER_WORD;
3757 offset /= UNITS_PER_WORD;
3759 /* If the partial register-part of the arg counts in its stack size,
3760 skip the part of stack space corresponding to the registers.
3761 Otherwise, start copying to the beginning of the stack space,
3762 by setting SKIP to 0. */
3763 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3765 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3766 x = validize_mem (force_const_mem (mode, x));
3768 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3769 SUBREGs of such registers are not allowed. */
3770 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3771 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3772 x = copy_to_reg (x);
3774 /* Loop over all the words allocated on the stack for this arg. */
3775 /* We can do it by words, because any scalar bigger than a word
3776 has a size a multiple of a word. */
3777 #ifndef PUSH_ARGS_REVERSED
3778 for (i = not_stack; i < size; i++)
3780 for (i = size - 1; i >= not_stack; i--)
3782 if (i >= not_stack + offset)
3783 emit_push_insn (operand_subword_force (x, i, mode),
3784 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3786 GEN_INT (args_offset + ((i - not_stack + skip)
3788 reg_parm_stack_space, alignment_pad);
3795 /* Push padding now if padding above and stack grows down,
3796 or if padding below and stack grows up.
3797 But if space already allocated, this has already been done. */
3798 if (extra && args_addr == 0
3799 && where_pad != none && where_pad != stack_direction)
3800 anti_adjust_stack (GEN_INT (extra));
3802 #ifdef PUSH_ROUNDING
3803 if (args_addr == 0 && PUSH_ARGS)
3804 emit_single_push_insn (mode, x, type);
3808 if (GET_CODE (args_so_far) == CONST_INT)
3810 = memory_address (mode,
3811 plus_constant (args_addr,
3812 INTVAL (args_so_far)));
3814 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3816 dest = gen_rtx_MEM (mode, addr);
3818 /* We do *not* set_mem_attributes here, because incoming arguments
3819 may overlap with sibling call outgoing arguments and we cannot
3820 allow reordering of reads from function arguments with stores
3821 to outgoing arguments of sibling calls. We do, however, want
3822 to record the alignment of the stack slot. */
3823 /* ALIGN may well be better aligned than TYPE, e.g. due to
3824 PARM_BOUNDARY. Assume the caller isn't lying. */
3825 set_mem_align (dest, align);
3827 emit_move_insn (dest, x);
3831 /* If part should go in registers, copy that part
3832 into the appropriate registers. Do this now, at the end,
3833 since mem-to-mem copies above may do function calls. */
3834 if (partial > 0 && reg != 0)
3836 /* Handle calls that pass values in multiple non-contiguous locations.
3837 The Irix 6 ABI has examples of this. */
3838 if (GET_CODE (reg) == PARALLEL)
3839 emit_group_load (reg, x, type, -1);
3842 gcc_assert (partial % UNITS_PER_WORD == 0);
3843 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3847 if (extra && args_addr == 0 && where_pad == stack_direction)
3848 anti_adjust_stack (GEN_INT (extra));
3850 if (alignment_pad && args_addr == 0)
3851 anti_adjust_stack (alignment_pad);
3854 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations.  */
3858 get_subtarget (rtx x)
3862 /* Only registers can be subtargets. */
3864 /* Don't use hard regs to avoid extending their life. */
3865 || REGNO (x) < FIRST_PSEUDO_REGISTER
3869 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3870 FIELD is a bitfield. Returns true if the optimization was successful,
3871 and there's nothing else to do. */
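/* An illustrative case this catches: given struct { unsigned f : 1; }
   *p, the store p->f ^= 1 can become a single xor on the word
   containing the field instead of an extract/modify/insert
   sequence.  */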
3874 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3875 unsigned HOST_WIDE_INT bitpos,
3876 enum machine_mode mode1, rtx str_rtx,
3879 enum machine_mode str_mode = GET_MODE (str_rtx);
3880 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3885 if (mode1 != VOIDmode
3886 || bitsize >= BITS_PER_WORD
3887 || str_bitsize > BITS_PER_WORD
3888 || TREE_SIDE_EFFECTS (to)
3889 || TREE_THIS_VOLATILE (to))
3893 if (!BINARY_CLASS_P (src)
3894 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3897 op0 = TREE_OPERAND (src, 0);
3898 op1 = TREE_OPERAND (src, 1);
3901 if (!operand_equal_p (to, op0, 0))
3904 if (MEM_P (str_rtx))
3906 unsigned HOST_WIDE_INT offset1;
3908 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3909 str_mode = word_mode;
3910 str_mode = get_best_mode (bitsize, bitpos,
3911 MEM_ALIGN (str_rtx), str_mode, 0);
3912 if (str_mode == VOIDmode)
3914 str_bitsize = GET_MODE_BITSIZE (str_mode);
3917 bitpos %= str_bitsize;
3918 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3919 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3921 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3924 /* If the bit field covers the whole REG/MEM, store_field
3925 will likely generate better code. */
3926 if (bitsize >= str_bitsize)
3929 /* We can't handle fields split across multiple entities. */
3930 if (bitpos + bitsize > str_bitsize)
3933 if (BYTES_BIG_ENDIAN)
3934 bitpos = str_bitsize - bitpos - bitsize;
3936 switch (TREE_CODE (src))
3940 /* For now, just optimize the case of the topmost bitfield
3941 where we don't need to do any masking and also
3942 1 bit bitfields where xor can be used.
3943 We might win by one instruction for the other bitfields
3944 too if insv/extv instructions aren't used, so that
3945 can be added later. */
3946 if (bitpos + bitsize != str_bitsize
3947 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3950 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3951 value = convert_modes (str_mode,
3952 TYPE_MODE (TREE_TYPE (op1)), value,
3953 TYPE_UNSIGNED (TREE_TYPE (op1)));
3955 /* We may be accessing data outside the field, which means
3956 we can alias adjacent data. */
3957 if (MEM_P (str_rtx))
3959 str_rtx = shallow_copy_rtx (str_rtx);
3960 set_mem_alias_set (str_rtx, 0);
3961 set_mem_expr (str_rtx, 0);
3964 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3965 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3967 value = expand_and (str_mode, value, const1_rtx, NULL);
3970 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3971 build_int_cst (NULL_TREE, bitpos),
3973 result = expand_binop (str_mode, binop, str_rtx,
3974 value, str_rtx, 1, OPTAB_WIDEN);
3975 if (result != str_rtx)
3976 emit_move_insn (str_rtx, result);
3981 if (TREE_CODE (op1) != INTEGER_CST)
3983 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3984 value = convert_modes (GET_MODE (str_rtx),
3985 TYPE_MODE (TREE_TYPE (op1)), value,
3986 TYPE_UNSIGNED (TREE_TYPE (op1)));
3988 /* We may be accessing data outside the field, which means
3989 we can alias adjacent data. */
3990 if (MEM_P (str_rtx))
3992 str_rtx = shallow_copy_rtx (str_rtx);
3993 set_mem_alias_set (str_rtx, 0);
3994 set_mem_expr (str_rtx, 0);
3997 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
3998 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4000 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4002 value = expand_and (GET_MODE (str_rtx), value, mask,
4005 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4006 build_int_cst (NULL_TREE, bitpos),
4008 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4009 value, str_rtx, 1, OPTAB_WIDEN);
4010 if (result != str_rtx)
4011 emit_move_insn (str_rtx, result);
4022 /* Expand an assignment that stores the value of FROM into TO. */
4025 expand_assignment (tree to, tree from)
4030 /* Don't crash if the lhs of the assignment was erroneous. */
4031 if (TREE_CODE (to) == ERROR_MARK)
4033 result = expand_normal (from);
4037 /* Optimize away no-op moves without side-effects. */
4038 if (operand_equal_p (to, from, 0))
4041 /* Assignment of a structure component needs special treatment
4042 if the structure component's rtx is not simply a MEM.
4043 Assignment of an array element at a constant index, and assignment of
4044 an array element in an unaligned packed structure field, has the same problem.
4046 if (handled_component_p (to)
4047 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4049 enum machine_mode mode1;
4050 HOST_WIDE_INT bitsize, bitpos;
4057 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4058 &unsignedp, &volatilep, true);
4060 /* If we are going to use store_bit_field and extract_bit_field,
4061 make sure to_rtx will be safe for multiple use. */
4063 to_rtx = expand_normal (tem);
4069 if (!MEM_P (to_rtx))
4071 /* We can get constant negative offsets into arrays with broken
4072 user code. Translate this to a trap instead of ICEing. */
4073 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4074 expand_builtin_trap ();
4075 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4078 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4079 #ifdef POINTERS_EXTEND_UNSIGNED
4080 if (GET_MODE (offset_rtx) != Pmode)
4081 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4083 if (GET_MODE (offset_rtx) != ptr_mode)
4084 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4087 /* A constant address in TO_RTX can have VOIDmode, we must not try
4088 to call force_reg for that case. Avoid that case. */
4090 && GET_MODE (to_rtx) == BLKmode
4091 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4093 && (bitpos % bitsize) == 0
4094 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4095 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4097 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4101 to_rtx = offset_address (to_rtx, offset_rtx,
4102 highest_pow2_factor_for_target (to,
4106 /* Handle expand_expr of a complex value returning a CONCAT. */
4107 if (GET_CODE (to_rtx) == CONCAT)
4109 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4111 gcc_assert (bitpos == 0);
4112 result = store_expr (from, to_rtx, false);
4116 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4117 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
4124 /* If the field is at offset zero, we could have been given the
4125 DECL_RTX of the parent struct. Don't munge it. */
4126 to_rtx = shallow_copy_rtx (to_rtx);
4128 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4130 /* Deal with volatile and readonly fields. The former is only
4131 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4133 MEM_VOLATILE_P (to_rtx) = 1;
4134 if (component_uses_parent_alias_set (to))
4135 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4138 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4139 to_rtx, from))
4140 result = NULL;
4141 else
4142 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4143 TREE_TYPE (tem), get_alias_set (to));
4147 preserve_temp_slots (result);
4153 /* If the rhs is a function call and its value is not an aggregate,
4154 call the function before we start to compute the lhs.
4155 This is needed for correct code for cases such as
4156 val = setjmp (buf) on machines where a reference to val
4157 requires loading up part of an address in a separate insn.
4159 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4160 since it might be a promoted variable where the zero- or sign-extension
4161 needs to be done. Handling this in the normal way is safe because no
4162 computation is done before the call. */
4163 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4164 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4165 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4166 && REG_P (DECL_RTL (to))))
4171 value = expand_normal (from);
4173 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4175 /* Handle calls that return values in multiple non-contiguous locations.
4176 The Irix 6 ABI has examples of this. */
4177 if (GET_CODE (to_rtx) == PARALLEL)
4178 emit_group_load (to_rtx, value, TREE_TYPE (from),
4179 int_size_in_bytes (TREE_TYPE (from)));
4180 else if (GET_MODE (to_rtx) == BLKmode)
4181 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4184 if (POINTER_TYPE_P (TREE_TYPE (to)))
4185 value = convert_memory_address (GET_MODE (to_rtx), value);
4186 emit_move_insn (to_rtx, value);
4188 preserve_temp_slots (to_rtx);
4194 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4195 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4198 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4200 /* Don't move directly into a return register. */
4201 if (TREE_CODE (to) == RESULT_DECL
4202 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4207 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4209 if (GET_CODE (to_rtx) == PARALLEL)
4210 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4211 int_size_in_bytes (TREE_TYPE (from)));
4213 emit_move_insn (to_rtx, temp);
4215 preserve_temp_slots (to_rtx);
4221 /* In case we are returning the contents of an object which overlaps
4222 the place the value is being stored, use a safe function when copying
4223 a value through a pointer into a structure value return block. */
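/* Illustrative case (an assumption about typical input): in

     struct S f (struct S *p) { return *p; }

   P may point into the very block where the caller expects the
   result, so the copy below goes through memmove rather than
   memcpy.  */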
4224 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4225 && current_function_returns_struct
4226 && !current_function_returns_pcc_struct)
4231 size = expr_size (from);
4232 from_rtx = expand_normal (from);
4234 emit_library_call (memmove_libfunc, LCT_NORMAL,
4235 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4236 XEXP (from_rtx, 0), Pmode,
4237 convert_to_mode (TYPE_MODE (sizetype),
4238 size, TYPE_UNSIGNED (sizetype)),
4239 TYPE_MODE (sizetype));
4241 preserve_temp_slots (to_rtx);
4247 /* Compute FROM and store the value in the rtx we got. */
4250 result = store_expr (from, to_rtx, 0);
4251 preserve_temp_slots (result);
4257 /* Generate code for computing expression EXP,
4258 and storing the value into TARGET.
4260 If the mode is BLKmode then we may return TARGET itself.
4261 It turns out that in BLKmode this doesn't cause a problem,
4262 because C has no operators that could combine two different
4263 assignments into the same BLKmode object with different values
4264 with no sequence point. Will other languages need this to
4265 be more thorough?
4267 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4268 stack, and block moves may need to be treated specially. */
4271 store_expr (tree exp, rtx target, int call_param_p)
4274 rtx alt_rtl = NULL_RTX;
4275 int dont_return_target = 0;
4277 if (VOID_TYPE_P (TREE_TYPE (exp)))
4279 /* C++ can generate ?: expressions with a throw expression in one
4280 branch and an rvalue in the other. Here, we resolve attempts to
4281 store the throw expression's nonexistent result. */
4282 gcc_assert (!call_param_p);
4283 expand_expr (exp, const0_rtx, VOIDmode, 0);
4286 if (TREE_CODE (exp) == COMPOUND_EXPR)
4288 /* Perform first part of compound expression, then assign from second
4289 part. */
4290 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4291 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4292 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4294 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4296 /* For conditional expression, get safe form of the target. Then
4297 test the condition, doing the appropriate assignment on either
4298 side. This avoids the creation of unnecessary temporaries.
4299 For non-BLKmode, it is more efficient not to do this. */
4301 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4303 do_pending_stack_adjust ();
4305 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4306 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4307 emit_jump_insn (gen_jump (lab2));
4310 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4316 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4317 /* If this is a scalar in a register that is stored in a wider mode
4318 than the declared mode, compute the result into its declared mode
4319 and then convert to the wider mode. Our value is the computed
4320 expression. */
4322 rtx inner_target = 0;
4324 /* We can do the conversion inside EXP, which will often result
4325 in some optimizations. Do the conversion in two steps: first
4326 change the signedness, if needed, then the extend. But don't
4327 do this if the type of EXP is a subtype of something else
4328 since then the conversion might involve more than just
4329 converting modes. */
4330 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4331 && TREE_TYPE (TREE_TYPE (exp)) == 0
4332 && (!lang_hooks.reduce_bit_field_operations
4333 || (GET_MODE_PRECISION (GET_MODE (target))
4334 == TYPE_PRECISION (TREE_TYPE (exp)))))
4336 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4337 != SUBREG_PROMOTED_UNSIGNED_P (target))
4338 exp = fold_convert
4339 (lang_hooks.types.signed_or_unsigned_type
4340 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4342 exp = fold_convert (lang_hooks.types.type_for_mode
4343 (GET_MODE (SUBREG_REG (target)),
4344 SUBREG_PROMOTED_UNSIGNED_P (target)),
4347 inner_target = SUBREG_REG (target);
4350 temp = expand_expr (exp, inner_target, VOIDmode,
4351 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4353 /* If TEMP is a VOIDmode constant, use convert_modes to make
4354 sure that we properly convert it. */
4355 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4357 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4358 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4359 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4360 GET_MODE (target), temp,
4361 SUBREG_PROMOTED_UNSIGNED_P (target));
4364 convert_move (SUBREG_REG (target), temp,
4365 SUBREG_PROMOTED_UNSIGNED_P (target));
4371 temp = expand_expr_real (exp, target, GET_MODE (target),
4372 (call_param_p
4373 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4374 &alt_rtl);
4375 /* Return TARGET if it's a specified hardware register.
4376 If TARGET is a volatile mem ref, either return TARGET
4377 or return a reg copied *from* TARGET; ANSI requires this.
4379 Otherwise, if TEMP is not TARGET, return TEMP
4380 if it is constant (for efficiency),
4381 or if we really want the correct value. */
4382 if (!(target && REG_P (target)
4383 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4384 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4385 && ! rtx_equal_p (temp, target)
4386 && CONSTANT_P (temp))
4387 dont_return_target = 1;
4390 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4391 the same as that of TARGET, adjust the constant. This is needed, for
4392 example, in case it is a CONST_DOUBLE and we want only a word-sized
4393 value. */
4394 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4395 && TREE_CODE (exp) != ERROR_MARK
4396 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4397 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4398 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4400 /* If value was not generated in the target, store it there.
4401 Convert the value to TARGET's type first if necessary and emit the
4402 pending incrementations that have been queued when expanding EXP.
4403 Note that we cannot emit the whole queue blindly because this will
4404 effectively disable the POST_INC optimization later.
4406 If TEMP and TARGET compare equal according to rtx_equal_p, but
4407 one or both of them are volatile memory refs, we have to distinguish
4408 two cases:
4409 - expand_expr has used TARGET. In this case, we must not generate
4410 another copy. This can be detected by TARGET being equal according
4411 to == .
4412 - expand_expr has not used TARGET - that means that the source just
4413 happens to have the same RTX form. Since temp will have been created
4414 by expand_expr, it will compare unequal according to == .
4415 We must generate a copy in this case, to reach the correct number
4416 of volatile memory references. */
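/* Illustration (hypothetical): if EXP is the volatile reference *P
   and TARGET is the MEM for *P, expand_expr may hand back TARGET
   itself (pointer-equal; first case, no copy), or it may build a
   fresh MEM of the same shape (rtx_equal_p but not ==; second case,
   a copy must be emitted to keep the right number of volatile
   accesses).  */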
4418 if ((! rtx_equal_p (temp, target)
4419 || (temp != target && (side_effects_p (temp)
4420 || side_effects_p (target))))
4421 && TREE_CODE (exp) != ERROR_MARK
4422 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4423 but TARGET is not a valid memory reference, TEMP will differ
4424 from TARGET although it is really the same location. */
4425 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4426 /* If there's nothing to copy, don't bother. Don't call
4427 expr_size unless necessary, because some front ends' (C++)
4428 expr_size hook must not be given objects that are not
4429 supposed to be bit-copied or bit-initialized. */
4430 && expr_size (exp) != const0_rtx)
4432 if (GET_MODE (temp) != GET_MODE (target)
4433 && GET_MODE (temp) != VOIDmode)
4435 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4436 if (dont_return_target)
4438 /* In this case, we will return TEMP,
4439 so make sure it has the proper mode.
4440 But don't forget to store the value into TARGET. */
4441 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4442 emit_move_insn (target, temp);
4445 convert_move (target, temp, unsignedp);
4448 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4450 /* Handle copying a string constant into an array. The string
4451 constant may be shorter than the array. So copy just the string's
4452 actual length, and clear the rest. First get the size of the data
4453 type of the string, which is actually the size of the target. */
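/* Illustration (hypothetical, assuming the usual C string layout):
   for

     char buf[16] = "abc";

   SIZE is 16 while TREE_STRING_LENGTH is 4 (the characters plus the
   terminating NUL), so 4 bytes are block-copied and the trailing 12
   are cleared.  */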
4454 rtx size = expr_size (exp);
4456 if (GET_CODE (size) == CONST_INT
4457 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4458 emit_block_move (target, temp, size,
4460 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4463 /* Compute the size of the data to copy from the string. */
4464 tree copy_size
4465 = size_binop (MIN_EXPR,
4466 make_tree (sizetype, size),
4467 size_int (TREE_STRING_LENGTH (exp)));
4468 rtx copy_size_rtx
4469 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4471 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4474 /* Copy that much. */
4475 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4476 TYPE_UNSIGNED (sizetype));
4477 emit_block_move (target, temp, copy_size_rtx,
4479 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4481 /* Figure out how much is left in TARGET that we have to clear.
4482 Do all calculations in ptr_mode. */
4483 if (GET_CODE (copy_size_rtx) == CONST_INT)
4485 size = plus_constant (size, -INTVAL (copy_size_rtx));
4486 target = adjust_address (target, BLKmode,
4487 INTVAL (copy_size_rtx));
4491 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4492 copy_size_rtx, NULL_RTX, 0,
4493 OPTAB_LIB_WIDEN);
4495 #ifdef POINTERS_EXTEND_UNSIGNED
4496 if (GET_MODE (copy_size_rtx) != Pmode)
4497 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4498 TYPE_UNSIGNED (sizetype));
4499 #endif
4501 target = offset_address (target, copy_size_rtx,
4502 highest_pow2_factor (copy_size));
4503 label = gen_label_rtx ();
4504 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4505 GET_MODE (size), 0, label);
4508 if (size != const0_rtx)
4509 clear_storage (target, size, BLOCK_OP_NORMAL);
4515 /* Handle calls that return values in multiple non-contiguous locations.
4516 The Irix 6 ABI has examples of this. */
4517 else if (GET_CODE (target) == PARALLEL)
4518 emit_group_load (target, temp, TREE_TYPE (exp),
4519 int_size_in_bytes (TREE_TYPE (exp)));
4520 else if (GET_MODE (temp) == BLKmode)
4521 emit_block_move (target, temp, expr_size (exp),
4523 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4526 temp = force_operand (temp, target);
4528 emit_move_insn (target, temp);
4535 /* Helper for categorize_ctor_elements. Identical interface. */
4538 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4539 HOST_WIDE_INT *p_elt_count,
4542 unsigned HOST_WIDE_INT idx;
4543 HOST_WIDE_INT nz_elts, elt_count;
4544 tree value, purpose;
4546 /* Whether CTOR is a valid constant initializer, in accordance with what
4547 initializer_constant_valid_p does. If inferred from the constructor
4548 elements, true until proven otherwise. */
4549 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4550 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4555 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4560 if (TREE_CODE (purpose) == RANGE_EXPR)
4562 tree lo_index = TREE_OPERAND (purpose, 0);
4563 tree hi_index = TREE_OPERAND (purpose, 1);
4565 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4566 mult = (tree_low_cst (hi_index, 1)
4567 - tree_low_cst (lo_index, 1) + 1);
4570 switch (TREE_CODE (value))
4574 HOST_WIDE_INT nz = 0, ic = 0;
4576 bool const_elt_p
4577 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4579 nz_elts += mult * nz;
4580 elt_count += mult * ic;
4582 if (const_from_elts_p && const_p)
4583 const_p = const_elt_p;
4589 if (!initializer_zerop (value))
4595 nz_elts += mult * TREE_STRING_LENGTH (value);
4596 elt_count += mult * TREE_STRING_LENGTH (value);
4600 if (!initializer_zerop (TREE_REALPART (value)))
4602 if (!initializer_zerop (TREE_IMAGPART (value)))
4610 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4612 if (!initializer_zerop (TREE_VALUE (v)))
4623 if (const_from_elts_p && const_p)
4624 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4625 != NULL_TREE;
4631 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4632 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4635 bool clear_this = true;
4637 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4639 /* We don't expect more than one element of the union to be
4640 initialized. Not sure what we should do otherwise... */
4641 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4644 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4645 CONSTRUCTOR_ELTS (ctor),
4648 /* ??? We could look at each element of the union, and find the
4649 largest element, which would avoid comparing the size of the
4650 initialized element against any tail padding in the union.
4651 Doesn't seem worth the effort... */
4652 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4653 TYPE_SIZE (init_sub_type)) == 1)
4655 /* And now we have to find out if the element itself is fully
4656 constructed. E.g. for union { struct { int a, b; } s; } u
4657 = { .s = { .a = 1 } }. */
4658 if (elt_count == count_type_elements (init_sub_type, false))
4663 *p_must_clear = clear_this;
4666 *p_nz_elts += nz_elts;
4667 *p_elt_count += elt_count;
4672 /* Examine CTOR to discover:
4673 * how many scalar fields are set to nonzero values,
4674 and place it in *P_NZ_ELTS;
4675 * how many scalar fields in total are in CTOR,
4676 and place it in *P_ELT_COUNT;
4677 * if a type is a union, and the initializer from the constructor
4678 is not the largest element in the union, then set *P_MUST_CLEAR.
4680 Return whether or not CTOR is a valid static constant initializer, the same
4681 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4684 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4685 HOST_WIDE_INT *p_elt_count,
4690 *p_must_clear = false;
4693 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4696 /* Count the number of scalars in TYPE. Return -1 on overflow or
4697 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a
4698 flexible array member at the end of the structure. */
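/* Worked example (illustrative only): for

     struct U { int a; double b[4]; };

   the result is 5: one scalar for A plus four for the elements of
   B.  With ALLOW_FLEXARR true, a trailing flexible member such as
   "int tail[];" is simply not counted.  */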
4701 count_type_elements (tree type, bool allow_flexarr)
4703 const HOST_WIDE_INT max = ~((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
4704 switch (TREE_CODE (type))
4708 tree telts = array_type_nelts (type);
4709 if (telts && host_integerp (telts, 1))
4711 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4712 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4715 else if (max / n > m)
4723 HOST_WIDE_INT n = 0, t;
4726 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4727 if (TREE_CODE (f) == FIELD_DECL)
4729 t = count_type_elements (TREE_TYPE (f), false);
4732 /* Check for structures with flexible array member. */
4733 tree tf = TREE_TYPE (f);
4735 && TREE_CHAIN (f) == NULL
4736 && TREE_CODE (tf) == ARRAY_TYPE
4738 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4739 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4740 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4741 && int_size_in_bytes (type) >= 0)
4753 case QUAL_UNION_TYPE:
4755 /* Ho hum. How in the world do we guess here? Clearly it isn't
4756 right to count the fields. Guess based on the number of words. */
4757 HOST_WIDE_INT n = int_size_in_bytes (type);
4760 return n / UNITS_PER_WORD;
4767 return TYPE_VECTOR_SUBPARTS (type);
4775 case REFERENCE_TYPE:
4787 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
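/* Illustration (hypothetical): an initializer for "int a[8]" with a
   single nonzero element qualifies (1 < 8/4), while one with three
   nonzero elements does not (3 >= 8/4).  */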
4790 mostly_zeros_p (tree exp)
4792 if (TREE_CODE (exp) == CONSTRUCTOR)
4795 HOST_WIDE_INT nz_elts, count, elts;
4798 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4802 elts = count_type_elements (TREE_TYPE (exp), false);
4804 return nz_elts < elts / 4;
4807 return initializer_zerop (exp);
4810 /* Return 1 if EXP contains all zeros. */
4813 all_zeros_p (tree exp)
4815 if (TREE_CODE (exp) == CONSTRUCTOR)
4818 HOST_WIDE_INT nz_elts, count;
4821 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
4822 return nz_elts == 0;
4825 return initializer_zerop (exp);
4828 /* Helper function for store_constructor.
4829 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4830 TYPE is the type of the CONSTRUCTOR, not the element type.
4831 CLEARED is as for store_constructor.
4832 ALIAS_SET is the alias set to use for any stores.
4834 This provides a recursive shortcut back to store_constructor when it isn't
4835 necessary to go through store_field. This is so that we can pass through
4836 the cleared field to let store_constructor know that we may not have to
4837 clear a substructure if the outer structure has already been cleared. */
4840 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4841 HOST_WIDE_INT bitpos, enum machine_mode mode,
4842 tree exp, tree type, int cleared, int alias_set)
4844 if (TREE_CODE (exp) == CONSTRUCTOR
4845 /* We can only call store_constructor recursively if the size and
4846 bit position are on a byte boundary. */
4847 && bitpos % BITS_PER_UNIT == 0
4848 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4849 /* If we have a nonzero bitpos for a register target, then we just
4850 let store_field do the bitfield handling. This is unlikely to
4851 generate unnecessary clear instructions anyway. */
4852 && (bitpos == 0 || MEM_P (target)))
4856 = adjust_address (target,
4857 GET_MODE (target) == BLKmode
4859 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4860 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4863 /* Update the alias set, if required. */
4864 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4865 && MEM_ALIAS_SET (target) != 0)
4867 target = copy_rtx (target);
4868 set_mem_alias_set (target, alias_set);
4871 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4874 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4877 /* Store the value of constructor EXP into the rtx TARGET.
4878 TARGET is either a REG or a MEM; we know it cannot conflict, since
4879 safe_from_p has been called.
4880 CLEARED is true if TARGET is known to have been zero'd.
4881 SIZE is the number of bytes of TARGET we are allowed to modify: this
4882 may not be the same as the size of EXP if we are assigning to a field
4883 which has been packed to exclude padding bits. */
4886 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4888 tree type = TREE_TYPE (exp);
4889 #ifdef WORD_REGISTER_OPERATIONS
4890 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4891 #endif
4893 switch (TREE_CODE (type))
4897 case QUAL_UNION_TYPE:
4899 unsigned HOST_WIDE_INT idx;
4902 /* If size is zero or the target is already cleared, do nothing. */
4903 if (size == 0 || cleared)
4905 /* We either clear the aggregate or indicate the value is dead. */
4906 else if ((TREE_CODE (type) == UNION_TYPE
4907 || TREE_CODE (type) == QUAL_UNION_TYPE)
4908 && ! CONSTRUCTOR_ELTS (exp))
4909 /* If the constructor is empty, clear the union. */
4911 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
4915 /* If we are building a static constructor into a register,
4916 set the initial value as zero so we can fold the value into
4917 a constant. But if more than one register is involved,
4918 this probably loses. */
4919 else if (REG_P (target) && TREE_STATIC (exp)
4920 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4922 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4926 /* If the constructor has fewer fields than the structure or
4927 if we are initializing the structure to mostly zeros, clear
4928 the whole structure first. Don't do this if TARGET is a
4929 register whose mode size isn't equal to SIZE since
4930 clear_storage can't handle this case. */
4932 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
4933 != fields_length (type))
4934 || mostly_zeros_p (exp))
4936 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4939 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
4944 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4946 /* Store each element of the constructor into the
4947 corresponding field of TARGET. */
4948 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
4950 enum machine_mode mode;
4951 HOST_WIDE_INT bitsize;
4952 HOST_WIDE_INT bitpos = 0;
4954 rtx to_rtx = target;
4956 /* Just ignore missing fields. We cleared the whole
4957 structure, above, if any fields are missing. */
4961 if (cleared && initializer_zerop (value))
4964 if (host_integerp (DECL_SIZE (field), 1))
4965 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4969 mode = DECL_MODE (field);
4970 if (DECL_BIT_FIELD (field))
4973 offset = DECL_FIELD_OFFSET (field);
4974 if (host_integerp (offset, 0)
4975 && host_integerp (bit_position (field), 0))
4977 bitpos = int_bit_position (field);
4981 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4988 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4989 make_tree (TREE_TYPE (exp),
4992 offset_rtx = expand_normal (offset);
4993 gcc_assert (MEM_P (to_rtx));
4995 #ifdef POINTERS_EXTEND_UNSIGNED
4996 if (GET_MODE (offset_rtx) != Pmode)
4997 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4998 #else
4999 if (GET_MODE (offset_rtx) != ptr_mode)
5000 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5001 #endif
5003 to_rtx = offset_address (to_rtx, offset_rtx,
5004 highest_pow2_factor (offset));
5007 #ifdef WORD_REGISTER_OPERATIONS
5008 /* If this initializes a field that is smaller than a
5009 word, at the start of a word, try to widen it to a full
5010 word. This special case allows us to output C++ member
5011 function initializations in a form that the optimizers
5012 can understand. */
5014 && bitsize < BITS_PER_WORD
5015 && bitpos % BITS_PER_WORD == 0
5016 && GET_MODE_CLASS (mode) == MODE_INT
5017 && TREE_CODE (value) == INTEGER_CST
5019 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5021 tree type = TREE_TYPE (value);
5023 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5025 type = lang_hooks.types.type_for_size
5026 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5027 value = fold_convert (type, value);
5030 if (BYTES_BIG_ENDIAN)
5032 = fold_build2 (LSHIFT_EXPR, type, value,
5033 build_int_cst (type,
5034 BITS_PER_WORD - bitsize));
5035 bitsize = BITS_PER_WORD;
5040 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5041 && DECL_NONADDRESSABLE_P (field))
5043 to_rtx = copy_rtx (to_rtx);
5044 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5047 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5048 value, type, cleared,
5049 get_alias_set (TREE_TYPE (field)));
5056 unsigned HOST_WIDE_INT i;
5059 tree elttype = TREE_TYPE (type);
5061 HOST_WIDE_INT minelt = 0;
5062 HOST_WIDE_INT maxelt = 0;
5064 domain = TYPE_DOMAIN (type);
5065 const_bounds_p = (TYPE_MIN_VALUE (domain)
5066 && TYPE_MAX_VALUE (domain)
5067 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5068 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5070 /* If we have constant bounds for the range of the type, get them. */
5073 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5074 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5077 /* If the constructor has fewer elements than the array, clear
5078 the whole array first. Similarly if this is a static
5079 constructor of a non-BLKmode object. */
5082 else if (REG_P (target) && TREE_STATIC (exp))
5086 unsigned HOST_WIDE_INT idx;
5088 HOST_WIDE_INT count = 0, zero_count = 0;
5089 need_to_clear = ! const_bounds_p;
5091 /* This loop is a more accurate version of the loop in
5092 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5093 is also needed to check for missing elements. */
5094 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5096 HOST_WIDE_INT this_node_count;
5101 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5103 tree lo_index = TREE_OPERAND (index, 0);
5104 tree hi_index = TREE_OPERAND (index, 1);
5106 if (! host_integerp (lo_index, 1)
5107 || ! host_integerp (hi_index, 1))
5113 this_node_count = (tree_low_cst (hi_index, 1)
5114 - tree_low_cst (lo_index, 1) + 1);
5117 this_node_count = 1;
5119 count += this_node_count;
5120 if (mostly_zeros_p (value))
5121 zero_count += this_node_count;
5124 /* Clear the entire array first if there are any missing
5125 elements, or if the incidence of zero elements is >=
5126 75%. */
5127 if (! need_to_clear
5128 && (count < maxelt - minelt + 1
5129 || 4 * zero_count >= 3 * count))
5133 if (need_to_clear && size > 0)
5136 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5138 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5142 if (!cleared && REG_P (target))
5143 /* Inform later passes that the old value is dead. */
5144 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5146 /* Store each element of the constructor into the
5147 corresponding element of TARGET, determined by counting the
5149 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5151 enum machine_mode mode;
5152 HOST_WIDE_INT bitsize;
5153 HOST_WIDE_INT bitpos;
5155 rtx xtarget = target;
5157 if (cleared && initializer_zerop (value))
5160 unsignedp = TYPE_UNSIGNED (elttype);
5161 mode = TYPE_MODE (elttype);
5162 if (mode == BLKmode)
5163 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5164 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5167 bitsize = GET_MODE_BITSIZE (mode);
5169 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5171 tree lo_index = TREE_OPERAND (index, 0);
5172 tree hi_index = TREE_OPERAND (index, 1);
5173 rtx index_r, pos_rtx;
5174 HOST_WIDE_INT lo, hi, count;
5177 /* If the range is constant and "small", unroll the loop. */
5179 && host_integerp (lo_index, 0)
5180 && host_integerp (hi_index, 0)
5181 && (lo = tree_low_cst (lo_index, 0),
5182 hi = tree_low_cst (hi_index, 0),
5183 count = hi - lo + 1,
5186 || (host_integerp (TYPE_SIZE (elttype), 1)
5187 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5190 lo -= minelt; hi -= minelt;
5191 for (; lo <= hi; lo++)
5193 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5196 && !MEM_KEEP_ALIAS_SET_P (target)
5197 && TREE_CODE (type) == ARRAY_TYPE
5198 && TYPE_NONALIASED_COMPONENT (type))
5200 target = copy_rtx (target);
5201 MEM_KEEP_ALIAS_SET_P (target) = 1;
5204 store_constructor_field
5205 (target, bitsize, bitpos, mode, value, type, cleared,
5206 get_alias_set (elttype));
5211 rtx loop_start = gen_label_rtx ();
5212 rtx loop_end = gen_label_rtx ();
5215 expand_normal (hi_index);
5216 unsignedp = TYPE_UNSIGNED (domain);
5218 index = build_decl (VAR_DECL, NULL_TREE, domain);
5221 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5223 SET_DECL_RTL (index, index_r);
5224 store_expr (lo_index, index_r, 0);
5226 /* Build the head of the loop. */
5227 do_pending_stack_adjust ();
5228 emit_label (loop_start);
5230 /* Assign value to element index. */
5231 position =
5232 fold_convert (ssizetype,
5233 fold_build2 (MINUS_EXPR,
5234 TREE_TYPE (index),
5235 index,
5236 TYPE_MIN_VALUE (domain)));
5238 position =
5239 size_binop (MULT_EXPR, position,
5240 fold_convert (ssizetype,
5241 TYPE_SIZE_UNIT (elttype)));
5243 pos_rtx = expand_normal (position);
5244 xtarget = offset_address (target, pos_rtx,
5245 highest_pow2_factor (position));
5246 xtarget = adjust_address (xtarget, mode, 0);
5247 if (TREE_CODE (value) == CONSTRUCTOR)
5248 store_constructor (value, xtarget, cleared,
5249 bitsize / BITS_PER_UNIT);
5251 store_expr (value, xtarget, 0);
5253 /* Generate a conditional jump to exit the loop. */
5254 exit_cond = build2 (LT_EXPR, integer_type_node,
5256 jumpif (exit_cond, loop_end);
5258 /* Update the loop counter, and jump to the head of
5259 the loop. */
5260 expand_assignment (index,
5261 build2 (PLUS_EXPR, TREE_TYPE (index),
5262 index, integer_one_node));
5264 emit_jump (loop_start);
5266 /* Build the end of the loop. */
5267 emit_label (loop_end);
5270 else if ((index != 0 && ! host_integerp (index, 0))
5271 || ! host_integerp (TYPE_SIZE (elttype), 1))
5276 index = ssize_int (1);
5279 index = fold_convert (ssizetype,
5280 fold_build2 (MINUS_EXPR,
5281 TREE_TYPE (index),
5282 index,
5283 TYPE_MIN_VALUE (domain)));
5285 position =
5286 size_binop (MULT_EXPR, index,
5287 fold_convert (ssizetype,
5288 TYPE_SIZE_UNIT (elttype)));
5289 xtarget = offset_address (target,
5290 expand_normal (position),
5291 highest_pow2_factor (position));
5292 xtarget = adjust_address (xtarget, mode, 0);
5293 store_expr (value, xtarget, 0);
5298 bitpos = ((tree_low_cst (index, 0) - minelt)
5299 * tree_low_cst (TYPE_SIZE (elttype), 1));
5301 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5303 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5304 && TREE_CODE (type) == ARRAY_TYPE
5305 && TYPE_NONALIASED_COMPONENT (type))
5307 target = copy_rtx (target);
5308 MEM_KEEP_ALIAS_SET_P (target) = 1;
5310 store_constructor_field (target, bitsize, bitpos, mode, value,
5311 type, cleared, get_alias_set (elttype));
5319 unsigned HOST_WIDE_INT idx;
5320 constructor_elt *ce;
5324 tree elttype = TREE_TYPE (type);
5325 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5326 enum machine_mode eltmode = TYPE_MODE (elttype);
5327 HOST_WIDE_INT bitsize;
5328 HOST_WIDE_INT bitpos;
5329 rtvec vector = NULL;
5332 gcc_assert (eltmode != BLKmode);
5334 n_elts = TYPE_VECTOR_SUBPARTS (type);
5335 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5337 enum machine_mode mode = GET_MODE (target);
5339 icode = (int) vec_init_optab->handlers[mode].insn_code;
5340 if (icode != CODE_FOR_nothing)
5344 vector = rtvec_alloc (n_elts);
5345 for (i = 0; i < n_elts; i++)
5346 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5350 /* If the constructor has fewer elements than the vector,
5351 clear the whole vector first. Similarly if this is a static
5352 constructor of a non-BLKmode object. */
5355 else if (REG_P (target) && TREE_STATIC (exp))
5359 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5362 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5364 int n_elts_here = tree_low_cst
5365 (int_const_binop (TRUNC_DIV_EXPR,
5366 TYPE_SIZE (TREE_TYPE (value)),
5367 TYPE_SIZE (elttype), 0), 1);
5369 count += n_elts_here;
5370 if (mostly_zeros_p (value))
5371 zero_count += n_elts_here;
5374 /* Clear the entire vector first if there are any missing elements,
5375 or if the incidence of zero elements is >= 75%. */
5376 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5379 if (need_to_clear && size > 0 && !vector)
5382 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5384 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5388 /* Inform later passes that the old value is dead. */
5389 if (!cleared && !vector && REG_P (target))
5390 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5392 /* Store each element of the constructor into the corresponding
5393 element of TARGET, determined by counting the elements. */
5394 for (idx = 0, i = 0;
5395 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5396 idx++, i += bitsize / elt_size)
5398 HOST_WIDE_INT eltpos;
5399 tree value = ce->value;
5401 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5402 if (cleared && initializer_zerop (value))
5406 eltpos = tree_low_cst (ce->index, 1);
5412 /* Vector CONSTRUCTORs should only be built from smaller
5413 vectors in the case of BLKmode vectors. */
5414 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5415 RTVEC_ELT (vector, eltpos)
5416 = expand_normal (value);
5420 enum machine_mode value_mode =
5421 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5422 ? TYPE_MODE (TREE_TYPE (value))
5424 bitpos = eltpos * elt_size;
5425 store_constructor_field (target, bitsize, bitpos,
5426 value_mode, value, type,
5427 cleared, get_alias_set (elttype));
5432 emit_insn (GEN_FCN (icode)
5433 (target,
5434 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5443 /* Store the value of EXP (an expression tree)
5444 into a subfield of TARGET which has mode MODE and occupies
5445 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5446 If MODE is VOIDmode, it means that we are storing into a bit-field.
5448 Always return const0_rtx unless we have something particular to
5449 return.
5451 TYPE is the type of the underlying object,
5453 ALIAS_SET is the alias set for the destination. This value will
5454 (in general) be different from that for TARGET, since TARGET is a
5455 reference to the containing structure. */
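/* Illustration (hypothetical, assuming a typical little-endian
   bit-field layout): storing into F in

     struct S { int pad : 7; int f : 9; } s;

   reaches store_field with BITSIZE 9, BITPOS 7 and MODE VOIDmode,
   so the value is written via the bit-field path below rather than
   through an ordinary memory reference.  */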
5458 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5459 enum machine_mode mode, tree exp, tree type, int alias_set)
5461 HOST_WIDE_INT width_mask = 0;
5463 if (TREE_CODE (exp) == ERROR_MARK)
5466 /* If we have nothing to store, do nothing unless the expression has
5467 side-effects. */
5468 if (bitsize == 0)
5469 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5470 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5471 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5473 /* If we are storing into an unaligned field of an aligned union that is
5474 in a register, we may have the mode of TARGET being an integer mode but
5475 MODE == BLKmode. In that case, get an aligned object whose size and
5476 alignment are the same as TARGET and store TARGET into it (we can avoid
5477 the store if the field being stored is the entire width of TARGET). Then
5478 call ourselves recursively to store the field into a BLKmode version of
5479 that object. Finally, load from the object into TARGET. This is not
5480 very efficient in general, but should only be slightly more expensive
5481 than the otherwise-required unaligned accesses. Perhaps this can be
5482 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5483 twice, once with emit_move_insn and once via store_field. */
5486 && (REG_P (target) || GET_CODE (target) == SUBREG))
5488 rtx object = assign_temp (type, 0, 1, 1);
5489 rtx blk_object = adjust_address (object, BLKmode, 0);
5491 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5492 emit_move_insn (object, target);
5494 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5496 emit_move_insn (target, object);
5498 /* We want to return the BLKmode version of the data. */
5502 if (GET_CODE (target) == CONCAT)
5504 /* We're storing into a struct containing a single __complex. */
5506 gcc_assert (!bitpos);
5507 return store_expr (exp, target, 0);
5510 /* If the structure is in a register or if the component
5511 is a bit field, we cannot use addressing to access it.
5512 Use bit-field techniques or SUBREG to store in it. */
5514 if (mode == VOIDmode
5515 || (mode != BLKmode && ! direct_store[(int) mode]
5516 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5517 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5519 || GET_CODE (target) == SUBREG
5520 /* If the field isn't aligned enough to store as an ordinary memref,
5521 store it as a bit field. */
5523 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5524 || bitpos % GET_MODE_ALIGNMENT (mode))
5525 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5526 || (bitpos % BITS_PER_UNIT != 0)))
5527 /* If the RHS and field are a constant size and the size of the
5528 RHS isn't the same size as the bitfield, we must use bitfield
5531 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5532 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5536 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5537 implies a mask operation. If the precision is the same size as
5538 the field we're storing into, that mask is redundant. This is
5539 particularly common with bit field assignments generated by the
5540 C front end. */
5541 if (TREE_CODE (exp) == NOP_EXPR)
5543 tree type = TREE_TYPE (exp);
5544 if (INTEGRAL_TYPE_P (type)
5545 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5546 && bitsize == TYPE_PRECISION (type))
5548 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5549 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5550 exp = TREE_OPERAND (exp, 0);
5554 temp = expand_normal (exp);
5556 /* If BITSIZE is narrower than the size of the type of EXP
5557 we will be narrowing TEMP. Normally, what's wanted are the
5558 low-order bits. However, if EXP's type is a record and this is a
5559 big-endian machine, we want the upper BITSIZE bits. */
5560 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5561 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5562 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5563 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5564 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5565 - bitsize),
5566 NULL_RTX, 1);
5568 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5569 MODE. */
5570 if (mode != VOIDmode && mode != BLKmode
5571 && mode != TYPE_MODE (TREE_TYPE (exp)))
5572 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5574 /* If the modes of TARGET and TEMP are both BLKmode, both
5575 must be in memory and BITPOS must be aligned on a byte
5576 boundary. If so, we simply do a block copy. */
5577 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5579 gcc_assert (MEM_P (target) && MEM_P (temp)
5580 && !(bitpos % BITS_PER_UNIT));
5582 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5583 emit_block_move (target, temp,
5584 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5591 /* Store the value in the bitfield. */
5592 store_bit_field (target, bitsize, bitpos, mode, temp);
5598 /* Now build a reference to just the desired component. */
5599 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5601 if (to_rtx == target)
5602 to_rtx = copy_rtx (to_rtx);
5604 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5605 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5606 set_mem_alias_set (to_rtx, alias_set);
5608 return store_expr (exp, to_rtx, 0);
5612 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5613 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5614 codes and find the ultimate containing object, which we return.
5616 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5617 bit position, and *PUNSIGNEDP to the signedness of the field.
5618 If the position of the field is variable, we store a tree
5619 giving the variable offset (in units) in *POFFSET.
5620 This offset is in addition to the bit position.
5621 If the position is not variable, we store 0 in *POFFSET.
5623 If any of the extraction expressions is volatile,
5624 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5626 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5627 is a mode that can be used to access the field. In that case, *PBITSIZE
5628 is redundant.
5630 If the field describes a variable-sized object, *PMODE is set to
5631 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5632 this case, but the address of the object can be found.
5634 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5635 look through nodes that serve as markers of a greater alignment than
5636 the one that can be deduced from the expression. These nodes make it
5637 possible for front-ends to prevent temporaries from being created by
5638 the middle-end on alignment considerations. For that purpose, the
5639 normal operating mode at high-level is to always pass FALSE so that
5640 the ultimate containing object is really returned; moreover, the
5641 associated predicate handled_component_p will always return TRUE
5642 on these nodes, thus indicating that they are essentially handled
5643 by get_inner_reference. TRUE should only be passed when the caller
5644 is scanning the expression in order to build another representation
5645 and specifically knows how to handle these nodes; as such, this is
5646 the normal operating mode in the RTL expanders. */
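/* Worked example (illustrative, assuming shorts are HImode): for
   S.X[3], where X is "short x[10]" placed at byte 4 of S, the loop
   below walks the ARRAY_REF and COMPONENT_REF and returns the
   object S with *PBITSIZE 16, *PBITPOS 32 + 3*16 = 80, *POFFSET 0
   (the position is entirely constant) and *PMODE HImode.  */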
5649 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5650 HOST_WIDE_INT *pbitpos, tree *poffset,
5651 enum machine_mode *pmode, int *punsignedp,
5652 int *pvolatilep, bool keep_aligning)
5655 enum machine_mode mode = VOIDmode;
5656 tree offset = size_zero_node;
5657 tree bit_offset = bitsize_zero_node;
5660 /* First get the mode, signedness, and size. We do this from just the
5661 outermost expression. */
5662 if (TREE_CODE (exp) == COMPONENT_REF)
5664 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5665 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5666 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5668 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5670 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5672 size_tree = TREE_OPERAND (exp, 1);
5673 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5677 mode = TYPE_MODE (TREE_TYPE (exp));
5678 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5680 if (mode == BLKmode)
5681 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5683 *pbitsize = GET_MODE_BITSIZE (mode);
5688 if (! host_integerp (size_tree, 1))
5689 mode = BLKmode, *pbitsize = -1;
5690 else
5691 *pbitsize = tree_low_cst (size_tree, 1);
5694 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5695 and find the ultimate containing object. */
5698 switch (TREE_CODE (exp))
5701 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5702 TREE_OPERAND (exp, 2));
5707 tree field = TREE_OPERAND (exp, 1);
5708 tree this_offset = component_ref_field_offset (exp);
5710 /* If this field hasn't been filled in yet, don't go past it.
5711 This should only happen when folding expressions made during
5712 type construction. */
5713 if (this_offset == 0)
5716 offset = size_binop (PLUS_EXPR, offset, this_offset);
5717 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5718 DECL_FIELD_BIT_OFFSET (field));
5720 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5725 case ARRAY_RANGE_REF:
5727 tree index = TREE_OPERAND (exp, 1);
5728 tree low_bound = array_ref_low_bound (exp);
5729 tree unit_size = array_ref_element_size (exp);
5731 /* We assume all arrays have sizes that are a multiple of a byte.
5732 First subtract the lower bound, if any, in the type of the
5733 index, then convert to sizetype and multiply by the size of
5734 the array element. */
5735 if (! integer_zerop (low_bound))
5736 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
5739 offset = size_binop (PLUS_EXPR, offset,
5740 size_binop (MULT_EXPR,
5741 fold_convert (sizetype, index),
5750 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5751 bitsize_int (*pbitsize));
5754 case VIEW_CONVERT_EXPR:
5755 if (keep_aligning && STRICT_ALIGNMENT
5756 && (TYPE_ALIGN (TREE_TYPE (exp))
5757 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5758 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5759 < BIGGEST_ALIGNMENT)
5760 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5761 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5769 /* If any reference in the chain is volatile, the effect is volatile. */
5770 if (TREE_THIS_VOLATILE (exp))
5773 exp = TREE_OPERAND (exp, 0);
5777 /* If OFFSET is constant, see if we can return the whole thing as a
5778 constant bit position. Otherwise, split it up. */
5779 if (host_integerp (offset, 0)
5780 && 0 != (tem = size_binop (MULT_EXPR,
5781 fold_convert (bitsizetype, offset),
5783 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5784 && host_integerp (tem, 0))
5785 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5787 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5793 /* Return a tree of sizetype representing the size, in bytes, of the element
5794 of EXP, an ARRAY_REF. */
5797 array_ref_element_size (tree exp)
5799 tree aligned_size = TREE_OPERAND (exp, 3);
5800 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5802 /* If a size was specified in the ARRAY_REF, it's the size measured
5803 in alignment units of the element type. So multiply by that value. */
5806 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5807 sizetype from another type of the same width and signedness. */
5808 if (TREE_TYPE (aligned_size) != sizetype)
5809 aligned_size = fold_convert (sizetype, aligned_size);
5810 return size_binop (MULT_EXPR, aligned_size,
5811 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5814 /* Otherwise, take the size from that of the element type. Substitute
5815 any PLACEHOLDER_EXPR that we have. */
5817 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5820 /* Return a tree representing the lower bound of the array mentioned in
5821 EXP, an ARRAY_REF. */
5824 array_ref_low_bound (tree exp)
5826 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5828 /* If a lower bound is specified in EXP, use it. */
5829 if (TREE_OPERAND (exp, 2))
5830 return TREE_OPERAND (exp, 2);
5832 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5833 substituting for a PLACEHOLDER_EXPR as needed. */
5834 if (domain_type && TYPE_MIN_VALUE (domain_type))
5835 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5837 /* Otherwise, return a zero of the appropriate type. */
5838 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5841 /* Return a tree representing the upper bound of the array mentioned in
5842 EXP, an ARRAY_REF. */
5845 array_ref_up_bound (tree exp)
5847 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5849 /* If there is a domain type and it has an upper bound, use it, substituting
5850 for a PLACEHOLDER_EXPR as needed. */
5851 if (domain_type && TYPE_MAX_VALUE (domain_type))
5852 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5854 /* Otherwise fail. */
5858 /* Return a tree representing the offset, in bytes, of the field referenced
5859 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5862 component_ref_field_offset (tree exp)
5864 tree aligned_offset = TREE_OPERAND (exp, 2);
5865 tree field = TREE_OPERAND (exp, 1);
5867 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5868 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5869 value. */
5872 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5873 sizetype from another type of the same width and signedness. */
5874 if (TREE_TYPE (aligned_offset) != sizetype)
5875 aligned_offset = fold_convert (sizetype, aligned_offset);
5876 return size_binop (MULT_EXPR, aligned_offset,
5877 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5880 /* Otherwise, take the offset from that of the field. Substitute
5881 any PLACEHOLDER_EXPR that we have. */
5883 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5886 /* Return 1 if T is an expression that get_inner_reference handles. */
5889 handled_component_p (tree t)
5891 switch (TREE_CODE (t))
5896 case ARRAY_RANGE_REF:
5897 case VIEW_CONVERT_EXPR:
5907 /* Given an rtx VALUE that may contain additions and multiplications, return
5908 an equivalent value that just refers to a register, memory, or constant.
5909 This is done by generating instructions to perform the arithmetic and
5910 returning a pseudo-register containing the value.
5912 The returned value may be a REG, SUBREG, MEM or constant. */
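/* Illustration (hypothetical): given

     VALUE = (plus (mult (reg 60) (const_int 4)) (reg 61)),

   force_operand emits the multiply and the add and returns the
   pseudo holding the sum, which is then usable wherever a general
   operand is required.  */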
5915 force_operand (rtx value, rtx target)
5918 /* Use subtarget as the target for operand 0 of a binary operation. */
5919 rtx subtarget = get_subtarget (target);
5920 enum rtx_code code = GET_CODE (value);
5922 /* Check for subreg applied to an expression produced by the loop optimizer. */
5924 && !REG_P (SUBREG_REG (value))
5925 && !MEM_P (SUBREG_REG (value)))
5927 value = simplify_gen_subreg (GET_MODE (value),
5928 force_reg (GET_MODE (SUBREG_REG (value)),
5929 force_operand (SUBREG_REG (value),
5931 GET_MODE (SUBREG_REG (value)),
5932 SUBREG_BYTE (value));
5933 code = GET_CODE (value);
5936 /* Check for a PIC address load. */
5937 if ((code == PLUS || code == MINUS)
5938 && XEXP (value, 0) == pic_offset_table_rtx
5939 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5940 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5941 || GET_CODE (XEXP (value, 1)) == CONST))
5944 subtarget = gen_reg_rtx (GET_MODE (value));
5945 emit_move_insn (subtarget, value);
5949 if (ARITHMETIC_P (value))
5951 op2 = XEXP (value, 1);
5952 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5954 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5957 op2 = negate_rtx (GET_MODE (value), op2);
5960 /* Check for an addition with OP2 a constant integer and our first
5961 operand a PLUS of a virtual register and something else. In that
5962 case, we want to emit the sum of the virtual register and the
5963 constant first and then add the other value. This allows virtual
5964 register instantiation to simply modify the constant rather than
5965 creating another one around this addition. */
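/* Illustration (hypothetical): for
   (plus (plus (reg virtual-stack-vars) (reg 60)) (const_int 8))
   we first compute virtual-stack-vars + 8, letting register
   instantiation fold the 8 into the frame-pointer offset, and only
   then add (reg 60).  */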
5966 if (code == PLUS && GET_CODE (op2) == CONST_INT
5967 && GET_CODE (XEXP (value, 0)) == PLUS
5968 && REG_P (XEXP (XEXP (value, 0), 0))
5969 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5970 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5972 rtx temp = expand_simple_binop (GET_MODE (value), code,
5973 XEXP (XEXP (value, 0), 0), op2,
5974 subtarget, 0, OPTAB_LIB_WIDEN);
5975 return expand_simple_binop (GET_MODE (value), code, temp,
5976 force_operand (XEXP (XEXP (value,
5978 target, 0, OPTAB_LIB_WIDEN);
5981 op1 = force_operand (XEXP (value, 0), subtarget);
5982 op2 = force_operand (op2, NULL_RTX);
5986 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5988 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5989 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5990 target, 1, OPTAB_LIB_WIDEN);
5992 return expand_divmod (0,
5993 FLOAT_MODE_P (GET_MODE (value))
5994 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5995 GET_MODE (value), op1, op2, target, 0);
5998 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6002 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6006 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6010 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6011 target, 0, OPTAB_LIB_WIDEN);
6014 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6015 target, 1, OPTAB_LIB_WIDEN);
6018 if (UNARY_P (value))
6021 target = gen_reg_rtx (GET_MODE (value));
6022 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6029 case FLOAT_TRUNCATE:
6030 convert_move (target, op1, code == ZERO_EXTEND);
6035 expand_fix (target, op1, code == UNSIGNED_FIX);
6039 case UNSIGNED_FLOAT:
6040 expand_float (target, op1, code == UNSIGNED_FLOAT);
6044 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6048 #ifdef INSN_SCHEDULING
6049 /* On machines that have insn scheduling, we want all memory references to be
6050 explicit, so we need to deal with such paradoxical SUBREGs. */
6051 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6052 && (GET_MODE_SIZE (GET_MODE (value))
6053 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6054 value
6055 = simplify_gen_subreg (GET_MODE (value),
6056 force_reg (GET_MODE (SUBREG_REG (value)),
6057 force_operand (SUBREG_REG (value),
6059 GET_MODE (SUBREG_REG (value)),
6060 SUBREG_BYTE (value));
6066 /* Subroutine of expand_expr: return nonzero iff there is no way that
6067 EXP can reference X, which is being modified. TOP_P is nonzero if this
6068 call is going to be used to determine whether we need a temporary
6069 for EXP, as opposed to a recursive call to this function.
6071 It is always safe for this routine to return zero since it merely
6072 searches for optimization opportunities. */
6075 safe_from_p (rtx x, tree exp, int top_p)
6081 /* If EXP has varying size, we MUST use a target since we currently
6082 have no way of allocating temporaries of variable size
6083 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6084 So we assume here that something at a higher level has prevented a
6085 clash. This is somewhat bogus, but the best we can do. Only
6086 do this when X is BLKmode and when we are at the top level. */
6087 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6088 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6089 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6090 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6091 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6093 && GET_MODE (x) == BLKmode)
6094 /* If X is in the outgoing argument area, it is always safe. */
6096 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6097 || (GET_CODE (XEXP (x, 0)) == PLUS
6098 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6101 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6102 find the underlying pseudo. */
6103 if (GET_CODE (x) == SUBREG)
6106 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6110 /* Now look at our tree code and possibly recurse. */
6111 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6113 case tcc_declaration:
6114 exp_rtl = DECL_RTL_IF_SET (exp);
6120 case tcc_exceptional:
6121 if (TREE_CODE (exp) == TREE_LIST)
6125 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6127 exp = TREE_CHAIN (exp);
6130 if (TREE_CODE (exp) != TREE_LIST)
6131 return safe_from_p (x, exp, 0);
6134 else if (TREE_CODE (exp) == CONSTRUCTOR)
6136 constructor_elt *ce;
6137 unsigned HOST_WIDE_INT idx;
6140 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6142 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6143 || !safe_from_p (x, ce->value, 0))
6147 else if (TREE_CODE (exp) == ERROR_MARK)
6148 return 1; /* An already-visited SAVE_EXPR? */
6153 /* The only case we look at here is the DECL_INITIAL inside a
6155 return (TREE_CODE (exp) != DECL_EXPR
6156 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6157 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6158 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6161 case tcc_comparison:
6162 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6167 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6169 case tcc_expression:
6171 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6172 the expression. If it is set, we conflict iff we are that rtx or
6173 both are in memory. Otherwise, we check all operands of the
6174 expression recursively. */
6176 switch (TREE_CODE (exp))
6179 /* If the operand is static or we are static, we can't conflict.
6180 Likewise if we don't conflict with the operand at all. */
6181 if (staticp (TREE_OPERAND (exp, 0))
6182 || TREE_STATIC (exp)
6183 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6186 /* Otherwise, the only way this can conflict is if we are taking
6187 the address of a DECL whose address is part of X, which is
6188 very rare. */
6189 exp = TREE_OPERAND (exp, 0);
6192 if (!DECL_RTL_SET_P (exp)
6193 || !MEM_P (DECL_RTL (exp)))
6196 exp_rtl = XEXP (DECL_RTL (exp), 0);
6200 case MISALIGNED_INDIRECT_REF:
6201 case ALIGN_INDIRECT_REF:
6204 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6205 get_alias_set (exp)))
6210 /* Assume that the call will clobber all hard registers and
6211 all of memory. */
6212 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6217 case WITH_CLEANUP_EXPR:
6218 case CLEANUP_POINT_EXPR:
6219 /* Lowered by gimplify.c. */
6220 gcc_unreachable ();
6221
6222 case SAVE_EXPR:
6223 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6229 /* If we have an rtx, we do not need to scan our operands. */
6230 if (exp_rtl)
6231 break;
6233 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
6234 for (i = 0; i < nops; i++)
6235 if (TREE_OPERAND (exp, i) != 0
6236 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6237 return 0;
6239 /* If this is a language-specific tree code, it may require
6240 special handling. */
6241 if ((unsigned int) TREE_CODE (exp)
6242 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6243 && !lang_hooks.safe_from_p (x, exp))
6244 return 0;
6245 break;
6248 /* Should never get a type here. */
6249 gcc_unreachable ();
6250 }
6252 /* If we have an rtl, find any enclosed object. Then see if we conflict
6253 with it. */
6254 if (exp_rtl)
6255 {
6256 if (GET_CODE (exp_rtl) == SUBREG)
6258 exp_rtl = SUBREG_REG (exp_rtl);
6259 if (REG_P (exp_rtl)
6260 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6261 return 0;
6262 }
6264 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6265 are memory and they conflict. */
6266 return ! (rtx_equal_p (x, exp_rtl)
6267 || (MEM_P (x) && MEM_P (exp_rtl)
6268 && true_dependence (exp_rtl, VOIDmode, x,
6269 rtx_addr_varies_p)));
6272 /* If we reach here, it is safe. */
6273 return 1;
6274 }
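/* For illustration (a sketch of a typical use, not itself part of the
   listing): when expanding "x = x + f ()", a caller asks
   safe_from_p (DECL_RTL (x), <CALL_EXPR f>, 1).  Since a CALL_EXPR is
   assumed above to clobber all hard registers and all of memory, the
   answer is 0 whenever X is a MEM or a hard register, and the caller
   must then expand the call into a fresh temporary instead of X.  */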
6277 /* Return the highest power of two that EXP is known to be a multiple of.
6278 This is used in updating alignment of MEMs in array references. */
6280 unsigned HOST_WIDE_INT
6281 highest_pow2_factor (tree exp)
6283 unsigned HOST_WIDE_INT c0, c1;
6285 switch (TREE_CODE (exp))
6286 {
6287 case INTEGER_CST:
6288 /* We can find the lowest bit that's a one. If the low
6289 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6290 We need to handle this case since we can find it in a COND_EXPR,
6291 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6292 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6293 later ICE. */
6294 if (TREE_CONSTANT_OVERFLOW (exp))
6295 return BIGGEST_ALIGNMENT;
6298 /* Note: tree_low_cst is intentionally not used here,
6299 we don't care about the upper bits. */
6300 c0 = TREE_INT_CST_LOW (exp);
6301 c0 &= -c0;
6302 return c0 ? c0 : BIGGEST_ALIGNMENT;
6306 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6307 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6308 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6309 return MIN (c0, c1);
6311 case MULT_EXPR:
6312 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6313 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6314 return c0 * c1;
6316 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6317 case CEIL_DIV_EXPR:
6318 if (integer_pow2p (TREE_OPERAND (exp, 1))
6319 && host_integerp (TREE_OPERAND (exp, 1), 1))
6321 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6322 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6323 return MAX (1, c0 / c1);
6327 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6328 case SAVE_EXPR:
6329 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6331 case COMPOUND_EXPR:
6332 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6334 case COND_EXPR:
6335 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6336 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6337 return MIN (c0, c1);
6338
6339 default:
6340 break;
6341 }
6342
6343 return 1;
6344 }
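/* For illustration: given the tree (x * 12) + 8, the recursion computes
   highest_pow2_factor (x * 12) = highest_pow2_factor (x) * 4 = 4 (an
   unknown operand contributes 1, and 4 is the largest power of two
   dividing 12), and then MIN (4, 8) = 4 for the PLUS_EXPR, so a MEM
   indexed by this expression may be assumed 4-byte aligned.  */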
6346 /* Similar, except that the alignment requirements of TARGET are
6347 taken into account. Assume it is at least as aligned as its
6348 type, unless it is a COMPONENT_REF in which case the layout of
6349 the structure gives the alignment. */
6351 static unsigned HOST_WIDE_INT
6352 highest_pow2_factor_for_target (tree target, tree exp)
6354 unsigned HOST_WIDE_INT target_align, factor;
6356 factor = highest_pow2_factor (exp);
6357 if (TREE_CODE (target) == COMPONENT_REF)
6358 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6359 else
6360 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6361 return MAX (factor, target_align);
6362 }
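/* For illustration: storing to a COMPONENT_REF whose FIELD_DECL carries
   DECL_ALIGN_UNIT of 16 (e.g. from an aligned attribute) yields at
   least 16 here, even if the source expression alone only guarantees a
   smaller power-of-two factor.  */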
6364 /* Expands variable VAR. */
6367 expand_var (tree var)
6369 if (DECL_EXTERNAL (var))
6370 return;
6372 if (TREE_STATIC (var))
6373 /* If this is an inlined copy of a static local variable,
6374 look up the original decl. */
6375 var = DECL_ORIGIN (var);
6377 if (TREE_STATIC (var)
6378 ? !TREE_ASM_WRITTEN (var)
6379 : !DECL_RTL_SET_P (var))
6381 if (TREE_CODE (var) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (var))
6382 /* Should be ignored. */;
6383 else if (lang_hooks.expand_decl (var))
6384 /* OK. */;
6385 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6386 expand_decl (var);
6387 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6388 rest_of_decl_compilation (var, 0, 0);
6389 else
6390 /* No expansion needed. */
6391 gcc_assert (TREE_CODE (var) == TYPE_DECL
6392 || TREE_CODE (var) == CONST_DECL
6393 || TREE_CODE (var) == FUNCTION_DECL
6394 || TREE_CODE (var) == LABEL_DECL);
6398 /* Subroutine of expand_expr. Expand the two operands of a binary
6399 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6400 The value may be stored in TARGET if TARGET is nonzero. The
6401 MODIFIER argument is as documented by expand_expr. */
6404 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6405 enum expand_modifier modifier)
6407 if (! safe_from_p (target, exp1, 1))
6408 target = 0;
6409 if (operand_equal_p (exp0, exp1, 0))
6411 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6412 *op1 = copy_rtx (*op0);
6413 }
6414 else
6415 {
6416 /* If we need to preserve evaluation order, copy exp0 into its own
6417 temporary variable so that it can't be clobbered by exp1. */
6418 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6419 exp0 = save_expr (exp0);
6420 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6421 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6422 }
6423 }
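/* For illustration: expanding the operands of "f () + g ()" with
   flag_evaluation_order set (a front end that requires left-to-right
   evaluation) first wraps EXP0 in a SAVE_EXPR, so that expanding the
   call in EXP1 cannot clobber the not-yet-consumed value of the first
   call.  */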
6426 /* Return a MEM that contains constant EXP. DEFER is as for
6427 output_constant_def and MODIFIER is as for expand_expr. */
6430 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6431 {
6432 rtx mem;
6433
6434 mem = output_constant_def (exp, defer);
6435 if (modifier != EXPAND_INITIALIZER)
6436 mem = use_anchored_address (mem);
6437 return mem;
6438 }
6440 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6441 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6444 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6445 enum expand_modifier modifier)
6447 rtx result, subtarget;
6449 HOST_WIDE_INT bitsize, bitpos;
6450 int volatilep, unsignedp;
6451 enum machine_mode mode1;
6453 /* If we are taking the address of a constant and are at the top level,
6454 we have to use output_constant_def since we can't call force_const_mem
6455 yet. */
6456 /* ??? This should be considered a front-end bug. We should not be
6457 generating ADDR_EXPR of something that isn't an LVALUE. The only
6458 exception here is STRING_CST. */
6459 if (TREE_CODE (exp) == CONSTRUCTOR
6460 || CONSTANT_CLASS_P (exp))
6461 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6463 /* Everything must be something allowed by is_gimple_addressable. */
6464 switch (TREE_CODE (exp))
6465 {
6466 case INDIRECT_REF:
6467 /* This case will happen via recursion for &a->b. */
6468 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6470 case CONST_DECL:
6471 /* Recurse and make the output_constant_def clause above handle this. */
6472 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6473 tmode, modifier);
6475 case REALPART_EXPR:
6476 /* The real part of the complex number is always first, therefore
6477 the address is the same as the address of the parent object. */
6478 offset = 0;
6479 bitpos = 0;
6480 inner = TREE_OPERAND (exp, 0);
6481 break;
6483 case IMAGPART_EXPR:
6484 /* The imaginary part of the complex number is always second.
6485 The expression is therefore always offset by the size of the
6486 scalar type. */
6487 offset = 0;
6488 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6489 inner = TREE_OPERAND (exp, 0);
6490 break;
6492 default:
6493 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6494 expand_expr, as that can have various side effects; LABEL_DECLs for
6495 example, may not have their DECL_RTL set yet. Assume language
6496 specific tree nodes can be expanded in some interesting way. */
6497 if (DECL_P (exp)
6498 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6499 {
6500 result = expand_expr (exp, target, tmode,
6501 modifier == EXPAND_INITIALIZER
6502 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6504 /* If the DECL isn't in memory, then the DECL wasn't properly
6505 marked TREE_ADDRESSABLE, which will be either a front-end
6506 or a tree optimizer bug. */
6507 gcc_assert (MEM_P (result));
6508 result = XEXP (result, 0);
6510 /* ??? Is this needed anymore? */
6511 if (DECL_P (exp) && !TREE_USED (exp))
6512 {
6513 assemble_external (exp);
6514 TREE_USED (exp) = 1;
6517 if (modifier != EXPAND_INITIALIZER
6518 && modifier != EXPAND_CONST_ADDRESS)
6519 result = force_operand (result, target);
6520 return result;
6521 }
6523 /* Pass FALSE as the last argument to get_inner_reference although
6524 we are expanding to RTL. The rationale is that we know how to
6525 handle "aligning nodes" here: we can just bypass them because
6526 they won't change the final object whose address will be returned
6527 (they actually exist only for that purpose). */
6528 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6529 &mode1, &unsignedp, &volatilep, false);
6530 break;
6531 }
6533 /* We must have made progress. */
6534 gcc_assert (inner != exp);
6536 subtarget = offset || bitpos ? NULL_RTX : target;
6537 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6539 if (offset)
6540 {
6541 rtx tmp;
6542
6543 if (modifier != EXPAND_NORMAL)
6544 result = force_operand (result, NULL);
6545 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6547 result = convert_memory_address (tmode, result);
6548 tmp = convert_memory_address (tmode, tmp);
6550 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6551 result = gen_rtx_PLUS (tmode, result, tmp);
6552 else
6553 {
6554 subtarget = bitpos ? NULL_RTX : target;
6555 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6556 1, OPTAB_LIB_WIDEN);
6557 }
6558 }
6559
6560 if (bitpos)
6561 {
6562 /* Someone beforehand should have rejected taking the address
6563 of such an object. */
6564 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6566 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6567 if (modifier < EXPAND_SUM)
6568 result = force_operand (result, target);
6569 }
6570
6571 return result;
6572 }
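/* For illustration: for &rec.fld where FLD sits at a constant byte
   offset of 4, get_inner_reference returns INNER = rec with BITPOS = 32
   and a null OFFSET; the recursive call above produces the address of
   REC, and plus_constant then adds bitpos / BITS_PER_UNIT = 4 to it.  */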
6574 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6575 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6578 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6579 enum expand_modifier modifier)
6581 enum machine_mode rmode;
6584 /* Target mode of VOIDmode says "whatever's natural". */
6585 if (tmode == VOIDmode)
6586 tmode = TYPE_MODE (TREE_TYPE (exp));
6588 /* We can get called with some Weird Things if the user does silliness
6589 like "(short) &a". In that case, convert_memory_address won't do
6590 the right thing, so ignore the given target mode. */
6591 if (tmode != Pmode && tmode != ptr_mode)
6592 tmode = Pmode;
6593
6594 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6595 tmode, modifier);
6597 /* Despite what expand_expr claims about ignoring TMODE when not
6598 strictly convenient, stuff breaks if we don't honor it. Note
6599 that combined with the above, we only do this for pointer modes. */
6600 rmode = GET_MODE (result);
6601 if (rmode == VOIDmode)
6602 rmode = tmode;
6603 if (rmode != tmode)
6604 result = convert_memory_address (tmode, result);
6605
6606 return result;
6607 }
6610 /* expand_expr: generate code for computing expression EXP.
6611 An rtx for the computed value is returned. The value is never null.
6612 In the case of a void EXP, const0_rtx is returned.
6614 The value may be stored in TARGET if TARGET is nonzero.
6615 TARGET is just a suggestion; callers must assume that
6616 the rtx returned may not be the same as TARGET.
6618 If TARGET is CONST0_RTX, it means that the value will be ignored.
6620 If TMODE is not VOIDmode, it suggests generating the
6621 result in mode TMODE. But this is done only when convenient.
6622 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6623 TMODE is just a suggestion; callers must assume that
6624 the rtx returned may not have mode TMODE.
6626 Note that TARGET may have neither TMODE nor MODE. In that case, it
6627 probably will not be used.
6629 If MODIFIER is EXPAND_SUM then when EXP is an addition
6630 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6631 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6632 products as above, or REG or MEM, or constant.
6633 Ordinarily in such cases we would output mul or add instructions
6634 and then return a pseudo reg containing the sum.
6636 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6637 it also marks a label as absolutely required (it can't be dead).
6638 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6639 This is used for outputting expressions used in initializers.
6641 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6642 with a constant address even if that address is not normally legitimate.
6643 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6645 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6646 a call parameter. Such targets require special care as we haven't yet
6647 marked TARGET so that it's safe from being trashed by libcalls. We
6648 don't want to use TARGET for anything but the final result;
6649 intermediate values must go elsewhere. Additionally, calls to
6650 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6652 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6653 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6654 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6655 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6656 recursively. */
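/* For illustration, a typical caller treats both TARGET and TMODE only
   as hints:

       rtx val = expand_expr (exp, target, mode, EXPAND_NORMAL);
       if (val != target)
         emit_move_insn (target, val);
*/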
6658 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6659 enum expand_modifier, rtx *);
6662 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6663 enum expand_modifier modifier, rtx *alt_rtl)
6664 {
6665 int rn = -1;
6666 rtx ret, last = NULL;
6668 /* Handle ERROR_MARK before anybody tries to access its type. */
6669 if (TREE_CODE (exp) == ERROR_MARK
6670 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6672 ret = CONST0_RTX (tmode);
6673 return ret ? ret : const0_rtx;
6676 if (flag_non_call_exceptions)
6678 rn = lookup_stmt_eh_region (exp);
6679 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6680 if (rn >= 0)
6681 last = get_last_insn ();
6682 }
6684 /* If this is an expression of some kind and it has an associated line
6685 number, then emit the line number before expanding the expression.
6687 We need to save and restore the file and line information so that
6688 errors discovered during expansion are emitted with the right
6689 information. It would be better if the diagnostic routines
6690 used the file/line information embedded in the tree nodes rather
6691 than globals. */
6692 if (cfun && cfun->ib_boundaries_block && EXPR_HAS_LOCATION (exp))
6694 location_t saved_location = input_location;
6695 input_location = EXPR_LOCATION (exp);
6696 emit_line_note (input_location);
6698 /* Record where the insns produced belong. */
6699 record_block_change (TREE_BLOCK (exp));
6701 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6703 input_location = saved_location;
6704 }
6705 else
6706 {
6707 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6708 }
6710 /* If using non-call exceptions, mark all insns that may trap.
6711 expand_call() will mark CALL_INSNs before we get to this code,
6712 but it doesn't handle libcalls, and these may trap. */
6713 if (rn >= 0)
6714 {
6715 rtx insn;
6716 for (insn = next_real_insn (last); insn;
6717 insn = next_real_insn (insn))
6719 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6720 /* If we want exceptions for non-call insns, any
6721 may_trap_p instruction may throw. */
6722 && GET_CODE (PATTERN (insn)) != CLOBBER
6723 && GET_CODE (PATTERN (insn)) != USE
6724 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6726 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6727 REG_NOTES (insn));
6728 }
6729 }
6730
6731 return ret;
6732 }
6735 static rtx
6736 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6737 enum expand_modifier modifier, rtx *alt_rtl)
6739 rtx op0, op1, temp, decl_rtl;
6740 tree type = TREE_TYPE (exp);
6742 enum machine_mode mode;
6743 enum tree_code code = TREE_CODE (exp);
6744 optab this_optab;
6745 rtx subtarget, original_target;
6746 int ignore;
6747 tree context, subexp0, subexp1;
6748 bool reduce_bit_field = false;
6749 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6750 ? reduce_to_bit_field_precision ((expr), \
6751 target, \
6752 type) \
6753 : (expr))
6755 mode = TYPE_MODE (type);
6756 unsignedp = TYPE_UNSIGNED (type);
6757 if (lang_hooks.reduce_bit_field_operations
6758 && TREE_CODE (type) == INTEGER_TYPE
6759 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6761 /* An operation in what may be a bit-field type needs the
6762 result to be reduced to the precision of the bit-field type,
6763 which is narrower than that of the type's mode. */
6764 reduce_bit_field = true;
6765 if (modifier == EXPAND_STACK_PARM)
6766 target = 0;
6767 }
6769 /* Use subtarget as the target for operand 0 of a binary operation. */
6770 subtarget = get_subtarget (target);
6771 original_target = target;
6772 ignore = (target == const0_rtx
6773 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6774 || code == CONVERT_EXPR || code == COND_EXPR
6775 || code == VIEW_CONVERT_EXPR)
6776 && TREE_CODE (type) == VOID_TYPE));
6778 /* If we are going to ignore this result, we need only do something
6779 if there is a side-effect somewhere in the expression. If there
6780 is, short-circuit the most common cases here. Note that we must
6781 not call expand_expr with anything but const0_rtx in case this
6782 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6784 if (ignore)
6785 {
6786 if (! TREE_SIDE_EFFECTS (exp))
6787 return const0_rtx;
6789 /* Ensure we reference a volatile object even if value is ignored, but
6790 don't do this if all we are doing is taking its address. */
6791 if (TREE_THIS_VOLATILE (exp)
6792 && TREE_CODE (exp) != FUNCTION_DECL
6793 && mode != VOIDmode && mode != BLKmode
6794 && modifier != EXPAND_CONST_ADDRESS)
6795 {
6796 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6797 if (MEM_P (temp))
6798 temp = copy_to_reg (temp);
6799 return const0_rtx;
6800 }
6802 if (TREE_CODE_CLASS (code) == tcc_unary
6803 || code == COMPONENT_REF || code == INDIRECT_REF)
6804 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6805 modifier);
6807 else if (TREE_CODE_CLASS (code) == tcc_binary
6808 || TREE_CODE_CLASS (code) == tcc_comparison
6809 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6810 {
6811 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6812 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6813 return const0_rtx;
6814 }
6815 else if (code == BIT_FIELD_REF)
6816 {
6817 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6818 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6819 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6820 return const0_rtx;
6821 }
6822
6823 target = 0;
6824 }
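/* For illustration: expanding a discarded "v + b" where V is
   volatile-qualified reaches the tcc_binary arm above; both operands
   are expanded with const0_rtx as the target so the volatile read still
   happens, and const0_rtx is returned without allocating a result
   register.  */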
6827 switch (code)
6828 {
6829 case LABEL_DECL:
6830 {
6831 tree function = decl_function_context (exp);
6833 temp = label_rtx (exp);
6834 temp = gen_rtx_LABEL_REF (Pmode, temp);
6836 if (function != current_function_decl
6837 && function != 0)
6838 LABEL_REF_NONLOCAL_P (temp) = 1;
6840 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6841 return temp;
6842 }
6843
6844 case SSA_NAME:
6845 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6846 NULL);
6848 case PARM_DECL:
6849 case VAR_DECL:
6850 /* If a static var's type was incomplete when the decl was written,
6851 but the type is complete now, lay out the decl now. */
6852 if (DECL_SIZE (exp) == 0
6853 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6854 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6855 layout_decl (exp, 0);
6857 /* ... fall through ... */
6858
6859 case FUNCTION_DECL:
6860 case RESULT_DECL:
6861 decl_rtl = DECL_RTL (exp);
6862 gcc_assert (decl_rtl);
6864 /* Ensure the variable is marked as used even if it doesn't go through
6865 a parser. If it hasn't been used yet, write out an external
6866 definition. */
6867 if (! TREE_USED (exp))
6869 assemble_external (exp);
6870 TREE_USED (exp) = 1;
6873 /* Show we haven't gotten RTL for this yet. */
6874 temp = 0;
6876 /* Variables inherited from containing functions should have
6877 been lowered by this point. */
6878 context = decl_function_context (exp);
6879 gcc_assert (!context
6880 || context == current_function_decl
6881 || TREE_STATIC (exp)
6882 /* ??? C++ creates functions that are not TREE_STATIC. */
6883 || TREE_CODE (exp) == FUNCTION_DECL);
6885 /* This is the case of an array whose size is to be determined
6886 from its initializer, while the initializer is still being parsed.
6887 See expand_decl. */
6889 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
6890 temp = validize_mem (decl_rtl);
6892 /* If DECL_RTL is memory, we are in the normal case and either
6893 the address is not valid or it is not a register and -fforce-addr
6894 is specified, get the address into a register. */
6896 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
6897 {
6898 if (alt_rtl)
6899 *alt_rtl = decl_rtl;
6900 decl_rtl = use_anchored_address (decl_rtl);
6901 if (modifier != EXPAND_CONST_ADDRESS
6902 && modifier != EXPAND_SUM
6903 && (!memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0))
6904 || (flag_force_addr && !REG_P (XEXP (decl_rtl, 0)))))
6905 temp = replace_equiv_address (decl_rtl,
6906 copy_rtx (XEXP (decl_rtl, 0)));
6909 /* If we got something, return it. But first, set the alignment
6910 if the address is a register. */
6911 if (temp != 0)
6912 {
6913 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6914 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6915
6916 return temp;
6917 }
6919 /* If the mode of DECL_RTL does not match that of the decl, it
6920 must be a promoted value. We return a SUBREG of the wanted mode,
6921 but mark it so that we know that it was already extended. */
6923 if (REG_P (decl_rtl)
6924 && GET_MODE (decl_rtl) != DECL_MODE (exp))
6926 enum machine_mode pmode;
6928 /* Get the signedness used for this variable. Ensure we get the
6929 same mode we got when the variable was declared. */
6930 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6931 (TREE_CODE (exp) == RESULT_DECL
6932 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
6933 gcc_assert (GET_MODE (decl_rtl) == pmode);
6935 temp = gen_lowpart_SUBREG (mode, decl_rtl);
6936 SUBREG_PROMOTED_VAR_P (temp) = 1;
6937 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6938 return temp;
6939 }
6940
6941 return decl_rtl;
6942
6943 case INTEGER_CST:
6944 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6945 TREE_INT_CST_HIGH (exp), mode);
6947 /* ??? If overflow is set, fold will have done an incomplete job,
6948 which can result in (plus xx (const_int 0)), which can get
6949 simplified by validate_replace_rtx during virtual register
6950 instantiation, which can result in unrecognizable insns.
6951 Avoid this by forcing all overflows into registers. */
6952 if (TREE_CONSTANT_OVERFLOW (exp)
6953 && modifier != EXPAND_INITIALIZER)
6954 temp = force_reg (mode, temp);
6955 return temp;
6956
6957 case VECTOR_CST:
6958 {
6960 tree tmp = NULL_TREE;
6961 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
6962 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT)
6963 return const_vector_from_tree (exp);
6964 if (GET_MODE_CLASS (mode) == MODE_INT)
6965 {
6966 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
6967 if (type_for_mode)
6968 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
6969 }
6970 if (!tmp)
6971 tmp = build_constructor_from_list (type,
6972 TREE_VECTOR_CST_ELTS (exp));
6973 return expand_expr (tmp, ignore ? const0_rtx : target,
6974 tmode, modifier);
6975 }
6976
6977 case CONST_DECL:
6978 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6980 case REAL_CST:
6981 /* If optimized, generate immediate CONST_DOUBLE
6982 which will be turned into memory by reload if necessary.
6984 We used to force a register so that loop.c could see it. But
6985 this does not allow gen_* patterns to perform optimizations with
6986 the constants. It also produces two insns in cases like "x = 1.0;".
6987 On most machines, floating-point constants are not permitted in
6988 many insns, so we'd end up copying it to a register in any case.
6990 Now, we do the copying in expand_binop, if appropriate. */
6991 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6992 TYPE_MODE (TREE_TYPE (exp)));
6994 case COMPLEX_CST:
6995 /* Handle evaluating a complex constant in a CONCAT target. */
6996 if (original_target && GET_CODE (original_target) == CONCAT)
6998 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6999 rtx rtarg, itarg;
7000
7001 rtarg = XEXP (original_target, 0);
7002 itarg = XEXP (original_target, 1);
7004 /* Move the real and imaginary parts separately. */
7005 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
7006 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
7008 if (op0 != rtarg)
7009 emit_move_insn (rtarg, op0);
7010 if (op1 != itarg)
7011 emit_move_insn (itarg, op1);
7012
7013 return original_target;
7016 /* ... fall through ... */
7018 case STRING_CST:
7019 temp = expand_expr_constant (exp, 1, modifier);
7021 /* temp contains a constant address.
7022 On RISC machines where a constant address isn't valid,
7023 make some insns to get that address into a register. */
7024 if (modifier != EXPAND_CONST_ADDRESS
7025 && modifier != EXPAND_INITIALIZER
7026 && modifier != EXPAND_SUM
7027 && (! memory_address_p (mode, XEXP (temp, 0))
7028 || flag_force_addr))
7029 return replace_equiv_address (temp,
7030 copy_rtx (XEXP (temp, 0)));
7031 return temp;
7032
7033 case SAVE_EXPR:
7034 {
7035 tree val = TREE_OPERAND (exp, 0);
7036 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7038 if (!SAVE_EXPR_RESOLVED_P (exp))
7040 /* We can indeed still hit this case, typically via builtin
7041 expanders calling save_expr immediately before expanding
7042 something. Assume this means that we only have to deal
7043 with non-BLKmode values. */
7044 gcc_assert (GET_MODE (ret) != BLKmode);
7046 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7047 DECL_ARTIFICIAL (val) = 1;
7048 DECL_IGNORED_P (val) = 1;
7049 TREE_OPERAND (exp, 0) = val;
7050 SAVE_EXPR_RESOLVED_P (exp) = 1;
7052 if (!CONSTANT_P (ret))
7053 ret = copy_to_reg (ret);
7054 SET_DECL_RTL (val, ret);
7055 }
7056
7057 return ret;
7058 }
7059
7060 case GOTO_EXPR:
7061 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7062 expand_goto (TREE_OPERAND (exp, 0));
7063 else
7064 expand_computed_goto (TREE_OPERAND (exp, 0));
7065 return const0_rtx;
7067 case CONSTRUCTOR:
7068 /* If we don't need the result, just ensure we evaluate any
7069 subexpressions. */
7070 if (ignore)
7071 {
7072 unsigned HOST_WIDE_INT idx;
7073 tree value;
7074
7075 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7076 expand_expr (value, const0_rtx, VOIDmode, 0);
7077
7078 return const0_rtx;
7079 }
7081 /* Try to avoid creating a temporary at all. This is possible
7082 if all of the initializer is zero.
7083 FIXME: try to handle all [0..255] initializers we can handle
7084 with memset. */
7085 else if (TREE_STATIC (exp)
7086 && !TREE_ADDRESSABLE (exp)
7087 && target != 0 && mode == BLKmode
7088 && all_zeros_p (exp))
7090 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7091 return target;
7092 }
7094 /* All elts simple constants => refer to a constant in memory. But
7095 if this is a non-BLKmode mode, let it store a field at a time
7096 since that should make a CONST_INT or CONST_DOUBLE when we
7097 fold. Likewise, if we have a target we can use, it is best to
7098 store directly into the target unless the type is large enough
7099 that memcpy will be used. If we are making an initializer and
7100 all operands are constant, put it in memory as well.
7102 FIXME: Avoid trying to fill vector constructors piece-meal.
7103 Output them with output_constant_def below unless we're sure
7104 they're zeros. This should go away when vector initializers
7105 are treated like VECTOR_CST instead of arrays. */
7107 else if ((TREE_STATIC (exp)
7108 && ((mode == BLKmode
7109 && ! (target != 0 && safe_from_p (target, exp, 1)))
7110 || TREE_ADDRESSABLE (exp)
7111 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7112 && (! MOVE_BY_PIECES_P
7113 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7114 TYPE_ALIGN (type)))
7115 && ! mostly_zeros_p (exp))))
7116 || ((modifier == EXPAND_INITIALIZER
7117 || modifier == EXPAND_CONST_ADDRESS)
7118 && TREE_CONSTANT (exp)))
7120 rtx constructor = expand_expr_constant (exp, 1, modifier);
7122 if (modifier != EXPAND_CONST_ADDRESS
7123 && modifier != EXPAND_INITIALIZER
7124 && modifier != EXPAND_SUM)
7125 constructor = validize_mem (constructor);
7126 return constructor;
7127 }
7128 else
7129 {
7131 /* Handle calls that pass values in multiple non-contiguous
7132 locations. The Irix 6 ABI has examples of this. */
7133 if (target == 0 || ! safe_from_p (target, exp, 1)
7134 || GET_CODE (target) == PARALLEL
7135 || modifier == EXPAND_STACK_PARM)
7136 target
7137 = assign_temp (build_qualified_type (type,
7138 (TYPE_QUALS (type)
7139 | (TREE_READONLY (exp)
7140 * TYPE_QUAL_CONST))),
7141 0, TREE_ADDRESSABLE (exp), 1);
7143 store_constructor (exp, target, 0, int_expr_size (exp));
7144 return target;
7145 }
7147 case MISALIGNED_INDIRECT_REF:
7148 case ALIGN_INDIRECT_REF:
7149 case INDIRECT_REF:
7150 {
7151 tree exp1 = TREE_OPERAND (exp, 0);
7153 if (modifier != EXPAND_WRITE)
7154 {
7155 tree t;
7156
7157 t = fold_read_from_constant_string (exp);
7158 if (t)
7159 return expand_expr (t, target, tmode, modifier);
7160 }
7162 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7163 op0 = memory_address (mode, op0);
7165 if (code == ALIGN_INDIRECT_REF)
7167 int align = TYPE_ALIGN_UNIT (type);
7168 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7169 op0 = memory_address (mode, op0);
7172 temp = gen_rtx_MEM (mode, op0);
7174 set_mem_attributes (temp, exp, 0);
7176 /* Resolve the misalignment now, so that we don't have to remember
7177 to resolve it later. Of course, this only works for reads. */
7178 /* ??? When we get around to supporting writes, we'll have to handle
7179 this in store_expr directly. The vectorizer isn't generating
7180 those yet, however. */
7181 if (code == MISALIGNED_INDIRECT_REF)
7186 gcc_assert (modifier == EXPAND_NORMAL
7187 || modifier == EXPAND_STACK_PARM);
7189 /* The vectorizer should have already checked the mode. */
7190 icode = movmisalign_optab->handlers[mode].insn_code;
7191 gcc_assert (icode != CODE_FOR_nothing);
7193 /* We've already validated the memory, and we're creating a
7194 new pseudo destination. The predicates really can't fail. */
7195 reg = gen_reg_rtx (mode);
7197 /* Nor can the insn generator. */
7198 insn = GEN_FCN (icode) (reg, temp);
7207 case TARGET_MEM_REF:
7209 struct mem_address addr;
7211 get_address_description (exp, &addr);
7212 op0 = addr_for_mem_ref (&addr, true);
7213 op0 = memory_address (mode, op0);
7214 temp = gen_rtx_MEM (mode, op0);
7215 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7222 tree array = TREE_OPERAND (exp, 0);
7223 tree index = TREE_OPERAND (exp, 1);
7225 /* Fold an expression like: "foo"[2].
7226 This is not done in fold so it won't happen inside &.
7227 Don't fold if this is for wide characters since it's too
7228 difficult to do correctly and this is a very rare case. */
7230 if (modifier != EXPAND_CONST_ADDRESS
7231 && modifier != EXPAND_INITIALIZER
7232 && modifier != EXPAND_MEMORY)
7233 {
7234 tree t = fold_read_from_constant_string (exp);
7235
7236 if (t)
7237 return expand_expr (t, target, tmode, modifier);
7238 }
7240 /* If this is a constant index into a constant array,
7241 just get the value from the array. Handle both the cases when
7242 we have an explicit constructor and when our operand is a variable
7243 that was declared const. */
7245 if (modifier != EXPAND_CONST_ADDRESS
7246 && modifier != EXPAND_INITIALIZER
7247 && modifier != EXPAND_MEMORY
7248 && TREE_CODE (array) == CONSTRUCTOR
7249 && ! TREE_SIDE_EFFECTS (array)
7250 && TREE_CODE (index) == INTEGER_CST)
7252 unsigned HOST_WIDE_INT ix;
7253 tree field, value;
7254
7255 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7256 field, value)
7257 if (tree_int_cst_equal (field, index))
7259 if (!TREE_SIDE_EFFECTS (value))
7260 return expand_expr (fold (value), target, tmode, modifier);
7265 else if (optimize >= 1
7266 && modifier != EXPAND_CONST_ADDRESS
7267 && modifier != EXPAND_INITIALIZER
7268 && modifier != EXPAND_MEMORY
7269 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7270 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7271 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7272 && targetm.binds_local_p (array))
7274 if (TREE_CODE (index) == INTEGER_CST)
7276 tree init = DECL_INITIAL (array);
7278 if (TREE_CODE (init) == CONSTRUCTOR)
7280 unsigned HOST_WIDE_INT ix;
7281 tree field, value;
7282
7283 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7284 field, value)
7285 if (tree_int_cst_equal (field, index))
7287 if (!TREE_SIDE_EFFECTS (value))
7288 return expand_expr (fold (value), target, tmode,
7293 else if (TREE_CODE (init) == STRING_CST)
7294 {
7295 tree index1 = index;
7296 tree low_bound = array_ref_low_bound (exp);
7297 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7299 /* Optimize the special-case of a zero lower bound.
7301 We convert the low_bound to sizetype to avoid some problems
7302 with constant folding. (E.g. suppose the lower bound is 1,
7303 and its mode is QI. Without the conversion, (ARRAY
7304 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7305 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7307 if (! integer_zerop (low_bound))
7308 index1 = size_diffop (index1, fold_convert (sizetype,
7309 low_bound));
7311 if (0 > compare_tree_int (index1,
7312 TREE_STRING_LENGTH (init)))
7314 tree type = TREE_TYPE (TREE_TYPE (init));
7315 enum machine_mode mode = TYPE_MODE (type);
7317 if (GET_MODE_CLASS (mode) == MODE_INT
7318 && GET_MODE_SIZE (mode) == 1)
7319 return gen_int_mode (TREE_STRING_POINTER (init)
7320 [TREE_INT_CST_LOW (index1)],
7321 mode);
7327 goto normal_inner_ref;
7328
7329 case COMPONENT_REF:
7330 /* If the operand is a CONSTRUCTOR, we can just extract the
7331 appropriate field if it is present. */
7332 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7333 {
7334 unsigned HOST_WIDE_INT idx;
7335 tree field, value;
7336
7337 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7338 idx, field, value)
7339 if (field == TREE_OPERAND (exp, 1)
7340 /* We can normally use the value of the field in the
7341 CONSTRUCTOR. However, if this is a bitfield in
7342 an integral mode that we can fit in a HOST_WIDE_INT,
7343 we must mask only the number of bits in the bitfield,
7344 since this is done implicitly by the constructor. If
7345 the bitfield does not meet either of those conditions,
7346 we can't do this optimization. */
7347 && (! DECL_BIT_FIELD (field)
7348 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7349 && (GET_MODE_BITSIZE (DECL_MODE (field))
7350 <= HOST_BITS_PER_WIDE_INT))))
7351 {
7352 if (DECL_BIT_FIELD (field)
7353 && modifier == EXPAND_STACK_PARM)
7354 target = 0;
7355 op0 = expand_expr (value, target, tmode, modifier);
7356 if (DECL_BIT_FIELD (field))
7357 {
7358 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7359 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7361 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7363 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7364 op0 = expand_and (imode, op0, op1, target);
7365 }
7366 else
7367 {
7368 tree count
7369 = build_int_cst (NULL_TREE,
7370 GET_MODE_BITSIZE (imode) - bitsize);
7372 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7373 target, 0);
7374 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7375 target, 0);
7382 goto normal_inner_ref;
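/* For illustration: with struct { unsigned f : 3; } c = { 6 }, reading
   c.f through the CONSTRUCTOR path above masks the stored value with
   (1 << 3) - 1; for a signed bit-field, the pair of shifts by
   GET_MODE_BITSIZE (imode) - 3 sign-extends it instead.  */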
7384 case BIT_FIELD_REF:
7385 case ARRAY_RANGE_REF:
7386 normal_inner_ref:
7387 {
7388 enum machine_mode mode1;
7389 HOST_WIDE_INT bitsize, bitpos;
7390 tree offset;
7391 int volatilep = 0;
7392 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7393 &mode1, &unsignedp, &volatilep, true);
7394 rtx orig_op0;
7396 /* If we got back the original object, something is wrong. Perhaps
7397 we are evaluating an expression too early. In any event, don't
7398 infinitely recurse. */
7399 gcc_assert (tem != exp);
7401 /* If TEM's type is a union of variable size, pass TARGET to the inner
7402 computation, since it will need a temporary and TARGET is known
7403 to have to do. This occurs in unchecked conversion in Ada. */
7405 orig_op0 = op0
7406 = expand_expr (tem,
7407 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7408 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7409 != INTEGER_CST)
7410 && modifier != EXPAND_STACK_PARM
7411 ? target : NULL_RTX),
7412 VOIDmode,
7413 (modifier == EXPAND_INITIALIZER
7414 || modifier == EXPAND_CONST_ADDRESS
7415 || modifier == EXPAND_STACK_PARM)
7416 ? modifier : EXPAND_NORMAL);
7418 /* If this is a constant, put it into a register if it is a legitimate
7419 constant, OFFSET is 0, and we won't try to extract outside the
7420 register (in case we were passed a partially uninitialized object
7421 or a view_conversion to a larger size). Force the constant to
7422 memory otherwise. */
7423 if (CONSTANT_P (op0))
7425 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7426 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7427 && offset == 0
7428 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7429 op0 = force_reg (mode, op0);
7430 else
7431 op0 = validize_mem (force_const_mem (mode, op0));
7432 }
7434 /* Otherwise, if this object not in memory and we either have an
7435 offset, a BLKmode result, or a reference outside the object, put it
7436 there. Such cases can occur in Ada if we have unchecked conversion
7437 of an expression from a scalar type to an array or record type or
7438 for an ARRAY_RANGE_REF whose type is BLKmode. */
7439 else if (!MEM_P (op0)
7440 && (offset != 0
7441 || (bitpos + bitsize > GET_MODE_BITSIZE (GET_MODE (op0)))
7442 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7444 tree nt = build_qualified_type (TREE_TYPE (tem),
7445 (TYPE_QUALS (TREE_TYPE (tem))
7446 | TYPE_QUAL_CONST));
7447 rtx memloc = assign_temp (nt, 1, 1, 1);
7449 emit_move_insn (memloc, op0);
7450 op0 = memloc;
7451 }
7452
7453 if (offset != 0)
7454 {
7455 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7456 EXPAND_SUM);
7457
7458 gcc_assert (MEM_P (op0));
7460 #ifdef POINTERS_EXTEND_UNSIGNED
7461 if (GET_MODE (offset_rtx) != Pmode)
7462 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7463 #else
7464 if (GET_MODE (offset_rtx) != ptr_mode)
7465 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7466 #endif
7468 if (GET_MODE (op0) == BLKmode
7469 /* A constant address in OP0 can have VOIDmode, we must
7470 not try to call force_reg in that case. */
7471 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7472 && bitsize != 0
7473 && (bitpos % bitsize) == 0
7474 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7475 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7476 {
7477 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7478 bitpos = 0;
7479 }
7481 op0 = offset_address (op0, offset_rtx,
7482 highest_pow2_factor (offset));
7485 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7486 record its alignment as BIGGEST_ALIGNMENT. */
7487 if (MEM_P (op0) && bitpos == 0 && offset != 0
7488 && is_aligning_offset (offset, tem))
7489 set_mem_align (op0, BIGGEST_ALIGNMENT);
7491 /* Don't forget about volatility even if this is a bitfield. */
7492 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7493 {
7494 if (op0 == orig_op0)
7495 op0 = copy_rtx (op0);
7497 MEM_VOLATILE_P (op0) = 1;
7498 }
7500 /* The following code doesn't handle CONCAT.
7501 Assume only bitpos == 0 can be used for CONCAT, due to
7502 one element arrays having the same mode as its element. */
7503 if (GET_CODE (op0) == CONCAT)
7504 {
7505 gcc_assert (bitpos == 0
7506 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7507 return op0;
7508 }
7510 /* In cases where an aligned union has an unaligned object
7511 as a field, we might be extracting a BLKmode value from
7512 an integer-mode (e.g., SImode) object. Handle this case
7513 by doing the extract into an object as wide as the field
7514 (which we know to be the width of a basic mode), then
7515 storing into memory, and changing the mode to BLKmode. */
7516 if (mode1 == VOIDmode
7517 || REG_P (op0) || GET_CODE (op0) == SUBREG
7518 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7519 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7520 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7521 && modifier != EXPAND_CONST_ADDRESS
7522 && modifier != EXPAND_INITIALIZER)
7523 /* If the field isn't aligned enough to fetch as a memref,
7524 fetch it as a bit field. */
7525 || (mode1 != BLKmode
7526 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7527 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7528 || (MEM_P (op0)
7529 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7530 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7531 && ((modifier == EXPAND_CONST_ADDRESS
7532 || modifier == EXPAND_INITIALIZER)
7533 ? STRICT_ALIGNMENT
7534 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7535 || (bitpos % BITS_PER_UNIT != 0)))
7536 /* If the type and the field are a constant size and the
7537 size of the type isn't the same size as the bitfield,
7538 we must use bitfield operations. */
7539 || (bitsize >= 0
7540 && TYPE_SIZE (TREE_TYPE (exp))
7541 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7542 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7543 bitsize)))
7544 {
7545 enum machine_mode ext_mode = mode;
7547 if (ext_mode == BLKmode
7548 && ! (target != 0 && MEM_P (op0)
7549 && MEM_P (target)
7550 && bitpos % BITS_PER_UNIT == 0))
7551 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7553 if (ext_mode == BLKmode)
7554 {
7555 if (target == 0)
7556 target = assign_temp (type, 0, 1, 1);
7557
7561 /* In this case, BITPOS must start at a byte boundary and
7562 TARGET, if specified, must be a MEM. */
7563 gcc_assert (MEM_P (op0)
7564 && (!target || MEM_P (target))
7565 && !(bitpos % BITS_PER_UNIT));
7567 emit_block_move (target,
7568 adjust_address (op0, VOIDmode,
7569 bitpos / BITS_PER_UNIT),
7570 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7571 / BITS_PER_UNIT),
7572 (modifier == EXPAND_STACK_PARM
7573 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7574
7575 return target;
7576 }
7577
7578 op0 = validize_mem (op0);
7580 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7581 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7583 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7584 (modifier == EXPAND_STACK_PARM
7585 ? NULL_RTX : target),
7586 ext_mode, ext_mode);
7588 /* If the result is a record type and BITSIZE is narrower than
7589 the mode of OP0, an integral mode, and this is a big endian
7590 machine, we must put the field into the high-order bits. */
7591 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7592 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7593 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7594 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7595 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7596 - bitsize),
7597 op0, 1);
7599 /* If the result type is BLKmode, store the data into a temporary
7600 of the appropriate type, but with the mode corresponding to the
7601 mode for the data we have (op0's mode). It's tempting to make
7602 this a constant type, since we know it's only being stored once,
7603 but that can cause problems if we are taking the address of this
7604 COMPONENT_REF because the MEM of any reference via that address
7605 will have flags corresponding to the type, which will not
7606 necessarily be constant. */
7607 if (mode == BLKmode)
7608 {
7609 rtx new
7610 = assign_stack_temp_for_type
7611 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7613 emit_move_insn (new, op0);
7614 op0 = copy_rtx (new);
7615 PUT_MODE (op0, BLKmode);
7616 set_mem_attributes (op0, exp, 1);
7617 }
7618
7619 return op0;
7620 }
7622 /* If the result is BLKmode, use that to access the object
7623 out of a larger one. */
7624 if (mode == BLKmode)
7625 mode1 = BLKmode;
7627 /* Get a reference to just this component. */
7628 if (modifier == EXPAND_CONST_ADDRESS
7629 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7630 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7632 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7634 if (op0 == orig_op0)
7635 op0 = copy_rtx (op0);
7637 set_mem_attributes (op0, exp, 0);
7638 if (REG_P (XEXP (op0, 0)))
7639 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7641 MEM_VOLATILE_P (op0) |= volatilep;
7642 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7643 || modifier == EXPAND_CONST_ADDRESS
7644 || modifier == EXPAND_INITIALIZER)
7645 return op0;
7646 else if (target == 0)
7647 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7649 convert_move (target, op0, unsignedp);
7650 return target;
7651 }
7652
7653 case OBJ_TYPE_REF:
7654 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7656 case CALL_EXPR:
7657 /* Check for a built-in function. */
7658 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7659 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7660 == FUNCTION_DECL)
7661 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7663 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7664 == BUILT_IN_FRONTEND)
7665 return lang_hooks.expand_expr (exp, original_target,
7666 tmode, modifier,
7667 alt_rtl);
7668 else
7669 return expand_builtin (exp, target, subtarget, tmode, ignore);
7670 }
7671
7672 return expand_call (exp, target, ignore);
7674 case NON_LVALUE_EXPR:
7675 case NOP_EXPR:
7676 case CONVERT_EXPR:
7677 if (TREE_OPERAND (exp, 0) == error_mark_node)
7678 return const0_rtx;
7679
7680 if (TREE_CODE (type) == UNION_TYPE)
7681 {
7682 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7683
7684 /* If both input and output are BLKmode, this conversion isn't doing
7685 anything except possibly changing memory attribute. */
7686 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7687 {
7688 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7689 modifier);
7690
7691 result = copy_rtx (result);
7692 set_mem_attributes (result, exp, 0);
7693 return result;
7694 }
7695
7696 if (target == 0)
7697 {
7698 if (TYPE_MODE (type) != BLKmode)
7699 target = gen_reg_rtx (TYPE_MODE (type));
7700 else
7701 target = assign_temp (type, 0, 1, 1);
7702 }
7704 if (MEM_P (target))
7705 /* Store data into beginning of memory target. */
7706 store_expr (TREE_OPERAND (exp, 0),
7707 adjust_address (target, TYPE_MODE (valtype), 0),
7708 modifier == EXPAND_STACK_PARM);
7710 else
7711 {
7712 gcc_assert (REG_P (target));
7713
7714 /* Store this field into a union of the proper type. */
7715 store_field (target,
7716 MIN ((int_size_in_bytes (TREE_TYPE
7717 (TREE_OPERAND (exp, 0)))
7718 * BITS_PER_UNIT),
7719 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7720 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7721 type, 0);
7722 }
7723
7724 /* Return the entire union. */
7725 return target;
7726 }
7727
7728 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7729 {
7730 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7731 modifier);
7732
7733 /* If the signedness of the conversion differs and OP0 is
7734 a promoted SUBREG, clear that indication since we now
7735 have to do the proper extension. */
7736 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7737 && GET_CODE (op0) == SUBREG)
7738 SUBREG_PROMOTED_VAR_P (op0) = 0;
7740 return REDUCE_BIT_FIELD (op0);
7743 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
7744 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7745 if (GET_MODE (op0) == mode)
7746 return REDUCE_BIT_FIELD (op0);
7747
7748 /* If OP0 is a constant, just convert it into the proper mode. */
7749 else if (CONSTANT_P (op0))
7751 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7752 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7754 if (modifier == EXPAND_INITIALIZER)
7755 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7756 subreg_lowpart_offset (mode,
7757 inner_mode));
7758 else
7759 op0 = convert_modes (mode, inner_mode, op0,
7760 TYPE_UNSIGNED (inner_type));
7763 else if (modifier == EXPAND_INITIALIZER)
7764 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7766 else if (target == 0)
7767 op0 = convert_to_mode (mode, op0,
7768 TYPE_UNSIGNED (TREE_TYPE
7769 (TREE_OPERAND (exp, 0))));
7770 else
7771 {
7772 convert_move (target, op0,
7773 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7774 op0 = target;
7775 }
7776
7777 return REDUCE_BIT_FIELD (op0);
7779 case VIEW_CONVERT_EXPR:
7780 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7782 /* If the input and output modes are both the same, we are done. */
7783 if (TYPE_MODE (type) == GET_MODE (op0))
7784 ;
7785 /* If neither mode is BLKmode, and both modes are the same size
7786 then we can use gen_lowpart. */
7787 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7788 && GET_MODE_SIZE (TYPE_MODE (type))
7789 == GET_MODE_SIZE (GET_MODE (op0)))
7790 {
7791 if (GET_CODE (op0) == SUBREG)
7792 op0 = force_reg (GET_MODE (op0), op0);
7793 op0 = gen_lowpart (TYPE_MODE (type), op0);
7794 }
7795 /* If both modes are integral, then we can convert from one to the
7796 other. */
7797 else if (SCALAR_INT_MODE_P (GET_MODE (op0))
7798 && SCALAR_INT_MODE_P (TYPE_MODE (type)))
7799 op0 = convert_modes (TYPE_MODE (type), GET_MODE (op0), op0,
7800 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7801 /* As a last resort, spill op0 to memory, and reload it in a
7802 different mode. */
7803 else if (!MEM_P (op0))
7804 {
7805 /* If the operand is not a MEM, force it into memory. Since we
7806 are going to be changing the mode of the MEM, don't call
7807 force_const_mem for constants because we don't allow pool
7808 constants to change mode. */
7809 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7811 gcc_assert (!TREE_ADDRESSABLE (exp));
7813 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7814 target
7815 = assign_stack_temp_for_type
7816 (TYPE_MODE (inner_type),
7817 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7819 emit_move_insn (target, op0);
7820 op0 = target;
7821 }
7823 /* At this point, OP0 is in the correct mode. If the output type is such
7824 that the operand is known to be aligned, indicate that it is.
7825 Otherwise, we need only be concerned about alignment for non-BLKmode
7826 results. */
7827 if (MEM_P (op0))
7828 {
7829 op0 = copy_rtx (op0);
7831 if (TYPE_ALIGN_OK (type))
7832 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7833 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7834 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7835 {
7836 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7837 HOST_WIDE_INT temp_size
7838 = MAX (int_size_in_bytes (inner_type),
7839 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7840 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7841 temp_size, 0, type);
7842 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7844 gcc_assert (!TREE_ADDRESSABLE (exp));
7846 if (GET_MODE (op0) == BLKmode)
7847 emit_block_move (new_with_op0_mode, op0,
7848 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7849 (modifier == EXPAND_STACK_PARM
7850 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7851 else
7852 emit_move_insn (new_with_op0_mode, op0);
7853
7854 op0 = new;
7855 }
7856
7857 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7858 }
7859
7860 return op0;
7861
7862 case PLUS_EXPR:
7863 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7864 something else, make sure we add the register to the constant and
7865 then to the other thing. This case can occur during strength
7866 reduction and doing it this way will produce better code if the
7867 frame pointer or argument pointer is eliminated.
7869 fold-const.c will ensure that the constant is always in the inner
7870 PLUS_EXPR, so the only case we need to do anything about is if
7871 sp, ap, or fp is our second argument, in which case we must swap
7872 the innermost first argument and our second argument. */
7874 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7875 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7876 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7877 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7878 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7879 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7880 {
7881 tree t = TREE_OPERAND (exp, 1);
7882
7883 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7884 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7885 }
7886
7887 /* If the result is to be ptr_mode and we are adding an integer to
7888 something, we might be forming a constant. So try to use
7889 plus_constant. If it produces a sum and we can't accept it,
7890 use force_operand. This allows P = &ARR[const] to generate
7891 efficient code on machines where a SYMBOL_REF is not a valid
7892 address.
7893
7894 If this is an EXPAND_SUM call, always return the sum. */
7895 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7896 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7898 if (modifier == EXPAND_STACK_PARM)
7899 target = 0;
7900 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7901 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7902 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7903 {
7904 rtx constant_part;
7905
7906 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7907 EXPAND_SUM);
7908 /* Use immed_double_const to ensure that the constant is
7909 truncated according to the mode of OP1, then sign extended
7910 to a HOST_WIDE_INT. Using the constant directly can result
7911 in non-canonical RTL in a 64x32 cross compile. */
7912 constant_part
7913 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7914 (HOST_WIDE_INT) 0,
7915 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7916 op1 = plus_constant (op1, INTVAL (constant_part));
7917 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7918 op1 = force_operand (op1, target);
7919 return REDUCE_BIT_FIELD (op1);
7920 }
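/* For illustration: for P = &ARR[const] expanded with EXPAND_SUM, the
   INTEGER_CST operand is folded into the address by plus_constant
   here, so on machines where (symbol_ref + const_int) is a legitimate
   address no separate add instruction needs to be emitted.  */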
7922 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7923 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7924 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7925 {
7926 rtx constant_part;
7927
7928 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7929 (modifier == EXPAND_INITIALIZER
7930 ? EXPAND_INITIALIZER : EXPAND_SUM));
7931 if (! CONSTANT_P (op0))
7932 {
7933 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7934 VOIDmode, modifier);
7935 /* Return a PLUS if modifier says it's OK. */
7936 if (modifier == EXPAND_SUM
7937 || modifier == EXPAND_INITIALIZER)
7938 return simplify_gen_binary (PLUS, mode, op0, op1);
7939 goto binop2;
7940 }
7941 /* Use immed_double_const to ensure that the constant is
7942 truncated according to the mode of OP1, then sign extended
7943 to a HOST_WIDE_INT. Using the constant directly can result
7944 in non-canonical RTL in a 64x32 cross compile. */
7945 constant_part
7946 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7947 (HOST_WIDE_INT) 0,
7948 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7949 op0 = plus_constant (op0, INTVAL (constant_part));
7950 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7951 op0 = force_operand (op0, target);
7952 return REDUCE_BIT_FIELD (op0);
7956 /* No sense saving up arithmetic to be done
7957 if it's all in the wrong mode to form part of an address.
7958 And force_operand won't know whether to sign-extend or
7959 zero-extend. */
7960 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7961 || mode != ptr_mode)
7962 {
7963 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7964 subtarget, &op0, &op1, 0);
7965 if (op0 == const0_rtx)
7966 return op1;
7967 if (op1 == const0_rtx)
7968 return op0;
7969 goto binop2;
7970 }
7971
7972 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7973 subtarget, &op0, &op1, modifier);
7974 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7976 case MINUS_EXPR:
7977 /* For initializers, we are allowed to return a MINUS of two
7978 symbolic constants. Here we handle all cases when both operands
7979 are constant. */
7980 /* Handle difference of two symbolic constants,
7981 for the sake of an initializer. */
7982 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7983 && really_constant_p (TREE_OPERAND (exp, 0))
7984 && really_constant_p (TREE_OPERAND (exp, 1)))
7985 {
7986 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7987 NULL_RTX, &op0, &op1, modifier);
7989 /* If the last operand is a CONST_INT, use plus_constant of
7990 the negated constant. Else make the MINUS. */
7991 if (GET_CODE (op1) == CONST_INT)
7992 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7993 else
7994 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7995 }
7996
7997 /* No sense saving up arithmetic to be done
7998 if it's all in the wrong mode to form part of an address.
7999 And force_operand won't know whether to sign-extend or
8000 zero-extend. */
8001 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8002 || mode != ptr_mode)
8003 goto binop;
8004
8005 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8006 subtarget, &op0, &op1, modifier);
8008 /* Convert A - const to A + (-const). */
8009 if (GET_CODE (op1) == CONST_INT)
8010 {
8011 op1 = negate_rtx (mode, op1);
8012 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8013 }
8014
8015 goto binop2;
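/* For illustration: "x - 5" becomes x + (-5) here, matching the
   canonical RTL form (plus (reg) (const_int -5)); later passes then
   only have to recognize PLUS when combining address arithmetic.  */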
8017 case MULT_EXPR:
8018 /* If first operand is constant, swap them.
8019 Thus the following special case checks need only
8020 check the second operand. */
8021 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8023 tree t1 = TREE_OPERAND (exp, 0);
8024 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8025 TREE_OPERAND (exp, 1) = t1;
8028 /* Attempt to return something suitable for generating an
8029 indexed address, for machines that support that. */
8031 if (modifier == EXPAND_SUM && mode == ptr_mode
8032 && host_integerp (TREE_OPERAND (exp, 1), 0))
8034 tree exp1 = TREE_OPERAND (exp, 1);
8036 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8037 EXPAND_SUM);
8038
8039 if (!REG_P (op0))
8040 op0 = force_operand (op0, NULL_RTX);
8041 if (!REG_P (op0))
8042 op0 = copy_to_mode_reg (mode, op0);
8043
8044 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8045 gen_int_mode (tree_low_cst (exp1, 0),
8046 TYPE_MODE (TREE_TYPE (exp1)))));
8047 }
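/* For illustration: under EXPAND_SUM, "i * 4" comes back as
   (mult (reg i) (const_int 4)) without being reduced, so a caller
   computing an address can fold it straight into an indexed addressing
   mode instead of emitting a separate multiply instruction.  */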
8049 if (modifier == EXPAND_STACK_PARM)
8050 target = 0;
8052 /* Check for multiplying things that have been extended
8053 from a narrower type. If this machine supports multiplying
8054 in that narrower type with a result in the desired type,
8055 do it that way, and avoid the explicit type-conversion. */
8057 subexp0 = TREE_OPERAND (exp, 0);
8058 subexp1 = TREE_OPERAND (exp, 1);
8059 /* First, check if we have a multiplication of one signed and one
8060 unsigned operand. */
8061 if (TREE_CODE (subexp0) == NOP_EXPR
8062 && TREE_CODE (subexp1) == NOP_EXPR
8063 && TREE_CODE (type) == INTEGER_TYPE
8064 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8065 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8066 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8067 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8068 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8069 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8071 enum machine_mode innermode
8072 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8073 this_optab = usmul_widen_optab;
8074 if (mode == GET_MODE_WIDER_MODE (innermode))
8075 {
8076 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8077 {
8078 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8079 expand_operands (TREE_OPERAND (subexp0, 0),
8080 TREE_OPERAND (subexp1, 0),
8081 NULL_RTX, &op0, &op1, 0);
8082 else
8083 expand_operands (TREE_OPERAND (subexp0, 0),
8084 TREE_OPERAND (subexp1, 0),
8085 NULL_RTX, &op1, &op0, 0);
8086 goto binop3;
8087 }
8088 }
8089
8091 /* Check for a multiplication with matching signedness. */
8092 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8093 && TREE_CODE (type) == INTEGER_TYPE
8094 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8095 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8096 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8097 && int_fits_type_p (TREE_OPERAND (exp, 1),
8098 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8099 /* Don't use a widening multiply if a shift will do. */
8100 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8101 > HOST_BITS_PER_WIDE_INT)
8102 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8103 ||
8104 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8105 && (TYPE_PRECISION (TREE_TYPE
8106 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8107 == TYPE_PRECISION (TREE_TYPE
8108 (TREE_OPERAND
8109 (TREE_OPERAND (exp, 0), 0))))
8110 /* If both operands are extended, they must either both
8111 be zero-extended or both be sign-extended. */
8112 && (TYPE_UNSIGNED (TREE_TYPE
8113 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8114 == TYPE_UNSIGNED (TREE_TYPE
8116 (TREE_OPERAND (exp, 0), 0)))))))
8118 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8119 enum machine_mode innermode = TYPE_MODE (op0type);
8120 bool zextend_p = TYPE_UNSIGNED (op0type);
8121 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8122 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8124 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8126 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8128 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8129 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8130 TREE_OPERAND (exp, 1),
8131 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8133 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8134 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8135 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8138 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8139 && innermode == word_mode)
8142 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8143 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8144 op1 = convert_modes (innermode, mode,
8145 expand_normal (TREE_OPERAND (exp, 1)),
8148 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8149 temp = expand_binop (mode, other_optab, op0, op1, target,
8150 unsignedp, OPTAB_LIB_WIDEN);
8151 hipart = gen_highpart (innermode, temp);
8152 htem = expand_mult_highpart_adjust (innermode, hipart,
8156 emit_move_insn (hipart, htem);
8157 return REDUCE_BIT_FIELD (temp);
8161 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8162 subtarget, &op0, &op1, 0);
8163 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8165 case TRUNC_DIV_EXPR:
8166 case FLOOR_DIV_EXPR:
8168 case ROUND_DIV_EXPR:
8169 case EXACT_DIV_EXPR:
8170 if (modifier == EXPAND_STACK_PARM)
8172 /* Possible optimization: compute the dividend with EXPAND_SUM
8173 then, if the divisor is constant, optimize the case
8174 where some terms of the dividend have coefficients divisible by it. */
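/* For instance, (x * 8 + y * 4) / 4 could be simplified to x * 2 + y
   whenever the constant divisor divides every coefficient; this is
   not currently done. */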
8175 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8176 subtarget, &op0, &op1, 0);
8177 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8182 case TRUNC_MOD_EXPR:
8183 case FLOOR_MOD_EXPR:
8185 case ROUND_MOD_EXPR:
8186 if (modifier == EXPAND_STACK_PARM)
8188 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8189 subtarget, &op0, &op1, 0);
8190 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8192 case FIX_TRUNC_EXPR:
8193 op0 = expand_normal (TREE_OPERAND (exp, 0));
8194 if (target == 0 || modifier == EXPAND_STACK_PARM)
8195 target = gen_reg_rtx (mode);
8196 expand_fix (target, op0, unsignedp);
8200 op0 = expand_normal (TREE_OPERAND (exp, 0));
8201 if (target == 0 || modifier == EXPAND_STACK_PARM)
8202 target = gen_reg_rtx (mode);
8203 /* expand_float can't figure out what to do if FROM has VOIDmode.
8204 So give it the correct mode. With -O, cse will optimize this. */
8205 if (GET_MODE (op0) == VOIDmode)
8206 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8208 expand_float (target, op0,
8209 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8213 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8214 if (modifier == EXPAND_STACK_PARM)
8216 temp = expand_unop (mode,
8217 optab_for_tree_code (NEGATE_EXPR, type),
8220 return REDUCE_BIT_FIELD (temp);
8223 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8224 if (modifier == EXPAND_STACK_PARM)
8227 /* ABS_EXPR is not valid for complex arguments. */
8228 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8229 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8231 /* Unsigned abs is simply the operand. Testing here means we don't
8232 risk generating incorrect code below. */
8233 if (TYPE_UNSIGNED (type))
8236 return expand_abs (mode, op0, target, unsignedp,
8237 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8241 target = original_target;
8243 || modifier == EXPAND_STACK_PARM
8244 || (MEM_P (target) && MEM_VOLATILE_P (target))
8245 || GET_MODE (target) != mode
8247 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8248 target = gen_reg_rtx (mode);
8249 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8250 target, &op0, &op1, 0);
8252 /* First try to do it with a special MIN or MAX instruction.
8253 If that does not win, use a conditional jump to select the proper value. */
8255 this_optab = optab_for_tree_code (code, type);
8256 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8261 /* At this point, a MEM target is no longer useful; we will get better code without it. */
8264 if (! REG_P (target))
8265 target = gen_reg_rtx (mode);
8267 /* If op1 was placed in target, swap op0 and op1. */
8268 if (target != op0 && target == op1)
8275 /* We generate better code and avoid problems with op1 mentioning
8276 target by forcing op1 into a pseudo if it isn't a constant. */
8277 if (! CONSTANT_P (op1))
8278 op1 = force_reg (mode, op1);
8281 enum rtx_code comparison_code;
8284 if (code == MAX_EXPR)
8285 comparison_code = unsignedp ? GEU : GE;
8287 comparison_code = unsignedp ? LEU : LE;
8289 /* Canonicalize to comparisons against 0. */
8290 if (op1 == const1_rtx)
8292 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8293 or (a != 0 ? a : 1) for unsigned.
8294 For MIN we are safe converting (a <= 1 ? a : 1)
8295 into (a <= 0 ? a : 1) */
8296 cmpop1 = const0_rtx;
8297 if (code == MAX_EXPR)
8298 comparison_code = unsignedp ? NE : GT;
8300 if (op1 == constm1_rtx && !unsignedp)
8302 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8303 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8304 cmpop1 = const0_rtx;
8305 if (code == MIN_EXPR)
8306 comparison_code = LT;
8308 #ifdef HAVE_conditional_move
8309 /* Use a conditional move if possible. */
8310 if (can_conditionally_move_p (mode))
8314 /* ??? Same problem as in expmed.c: emit_conditional_move
8315 forces a stack adjustment via compare_from_rtx, and we
8316 lose the stack adjustment if the sequence we are about
8317 to create is discarded. */
8318 do_pending_stack_adjust ();
8322 /* Try to emit the conditional move. */
8323 insn = emit_conditional_move (target, comparison_code,
8328 /* If we could do the conditional move, emit the sequence, and return. */
8332 rtx seq = get_insns ();
8338 /* Otherwise discard the sequence and fall back to code with branches. */
8344 emit_move_insn (target, op0);
8346 temp = gen_label_rtx ();
8347 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8348 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8350 emit_move_insn (target, op1);
8355 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8356 if (modifier == EXPAND_STACK_PARM)
8358 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8362 /* ??? Can optimize bitwise operations with one arg constant.
8363 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8364 and (a bitwise1 b) bitwise2 b (etc)
8365 but that is probably not worthwhile. */
8367 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8368 boolean values when we want in all cases to compute both of them. In
8369 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8370 as actual zero-or-1 values and then bitwise anding. In cases where
8371 there cannot be any side effects, better code would be made by
8372 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8373 how to recognize those cases. */
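/* For example, a TRUTH_AND_EXPR of two comparisons is computed as

     t1 = (a != 0); t2 = (b != 0); result = t1 & t2;

   with both operands evaluated unconditionally, whereas
   TRUTH_ANDIF_EXPR would branch around the second evaluation. */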
8375 case TRUTH_AND_EXPR:
8376 code = BIT_AND_EXPR;
8381 code = BIT_IOR_EXPR;
8385 case TRUTH_XOR_EXPR:
8386 code = BIT_XOR_EXPR;
8394 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8396 if (modifier == EXPAND_STACK_PARM)
8398 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8399 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8402 /* Could determine the answer when only additive constants differ. Also,
8403 the addition of one can be handled by changing the condition. */
8410 case UNORDERED_EXPR:
8418 temp = do_store_flag (exp,
8419 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8420 tmode != VOIDmode ? tmode : mode, 0);
8424 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8425 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8427 && REG_P (original_target)
8428 && (GET_MODE (original_target)
8429 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8431 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8434 /* If temp is constant, we can just compute the result. */
8435 if (GET_CODE (temp) == CONST_INT)
8437 if (INTVAL (temp) != 0)
8438 emit_move_insn (target, const1_rtx);
8440 emit_move_insn (target, const0_rtx);
8445 if (temp != original_target)
8447 enum machine_mode mode1 = GET_MODE (temp);
8448 if (mode1 == VOIDmode)
8449 mode1 = tmode != VOIDmode ? tmode : mode;
8451 temp = copy_to_mode_reg (mode1, temp);
8454 op1 = gen_label_rtx ();
8455 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8456 GET_MODE (temp), unsignedp, op1);
8457 emit_move_insn (temp, const1_rtx);
8462 /* If no set-flag instruction, must generate a conditional store
8463 into a temporary variable. Drop through and handle this like && and ||. */
8468 || modifier == EXPAND_STACK_PARM
8469 || ! safe_from_p (target, exp, 1)
8470 /* Make sure we don't have a hard reg (such as function's return
8471 value) live across basic blocks, if not optimizing. */
8472 || (!optimize && REG_P (target)
8473 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8474 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8477 emit_move_insn (target, const0_rtx);
8479 op1 = gen_label_rtx ();
8480 jumpifnot (exp, op1);
8483 emit_move_insn (target, const1_rtx);
8486 return ignore ? const0_rtx : target;
8488 case TRUTH_NOT_EXPR:
8489 if (modifier == EXPAND_STACK_PARM)
8491 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8492 /* The parser is careful to generate TRUTH_NOT_EXPR
8493 only with operands that are always zero or one. */
8494 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8495 target, 1, OPTAB_LIB_WIDEN);
8499 case STATEMENT_LIST:
8501 tree_stmt_iterator iter;
8503 gcc_assert (ignore);
8505 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8506 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8511 /* A COND_EXPR with its type being VOID_TYPE represents a
8512 conditional jump and is handled in
8513 expand_gimple_cond_expr. */
8514 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
8516 /* Note that COND_EXPRs whose type is a structure or union
8517 are required to be constructed to contain assignments of
8518 a temporary variable, so that we can evaluate them here
8519 for side effect only. If type is void, we must do likewise. */
8521 gcc_assert (!TREE_ADDRESSABLE (type)
8523 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8524 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8526 /* If we are not to produce a result, we have no target. Otherwise,
8527 if a target was specified use it; it will not be used as an
8528 intermediate target unless it is safe. If no target, use a temporary and store the value there. */
8531 if (modifier != EXPAND_STACK_PARM
8533 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8534 && GET_MODE (original_target) == mode
8535 #ifdef HAVE_conditional_move
8536 && (! can_conditionally_move_p (mode)
8537 || REG_P (original_target))
8539 && !MEM_P (original_target))
8540 temp = original_target;
8542 temp = assign_temp (type, 0, 0, 1);
8544 do_pending_stack_adjust ();
8546 op0 = gen_label_rtx ();
8547 op1 = gen_label_rtx ();
8548 jumpifnot (TREE_OPERAND (exp, 0), op0);
8549 store_expr (TREE_OPERAND (exp, 1), temp,
8550 modifier == EXPAND_STACK_PARM);
8552 emit_jump_insn (gen_jump (op1));
8555 store_expr (TREE_OPERAND (exp, 2), temp,
8556 modifier == EXPAND_STACK_PARM);
8563 target = expand_vec_cond_expr (exp, target);
8568 tree lhs = TREE_OPERAND (exp, 0);
8569 tree rhs = TREE_OPERAND (exp, 1);
8571 gcc_assert (ignore);
8573 /* Check for |= or &= of a bitfield of size one into another bitfield
8574 of size 1. In this case, (unless we need the result of the
8575 assignment) we can do this more efficiently with a
8576 test followed by an assignment, if necessary.
8578 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8579 things change so we do, this code should be enhanced to support it. */
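/* For instance, with one-bit bit-fields, "s.a |= s.b" can be expanded
   as "if (s.b) s.a = 1;", replacing a read-modify-write of the
   destination bit-field with a test and a conditional store. */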
8581 if (TREE_CODE (lhs) == COMPONENT_REF
8582 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8583 || TREE_CODE (rhs) == BIT_AND_EXPR)
8584 && TREE_OPERAND (rhs, 0) == lhs
8585 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8586 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8587 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8589 rtx label = gen_label_rtx ();
8590 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
8591 do_jump (TREE_OPERAND (rhs, 1),
8594 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value));
8595 do_pending_stack_adjust ();
8600 expand_assignment (lhs, rhs);
8606 if (!TREE_OPERAND (exp, 0))
8607 expand_null_return ();
8609 expand_return (TREE_OPERAND (exp, 0));
8613 return expand_expr_addr_expr (exp, target, tmode, modifier);
8616 /* Get the rtx code of the operands. */
8617 op0 = expand_normal (TREE_OPERAND (exp, 0));
8618 op1 = expand_normal (TREE_OPERAND (exp, 1));
8621 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8623 /* Move the real (op0) and imaginary (op1) parts to their location. */
8624 write_complex_part (target, op0, false);
8625 write_complex_part (target, op1, true);
8630 op0 = expand_normal (TREE_OPERAND (exp, 0));
8631 return read_complex_part (op0, false);
8634 op0 = expand_normal (TREE_OPERAND (exp, 0));
8635 return read_complex_part (op0, true);
8638 expand_resx_expr (exp);
8641 case TRY_CATCH_EXPR:
8643 case EH_FILTER_EXPR:
8644 case TRY_FINALLY_EXPR:
8645 /* Lowered by tree-eh.c. */
8648 case WITH_CLEANUP_EXPR:
8649 case CLEANUP_POINT_EXPR:
8651 case CASE_LABEL_EXPR:
8657 case PREINCREMENT_EXPR:
8658 case PREDECREMENT_EXPR:
8659 case POSTINCREMENT_EXPR:
8660 case POSTDECREMENT_EXPR:
8663 case TRUTH_ANDIF_EXPR:
8664 case TRUTH_ORIF_EXPR:
8665 /* Lowered by gimplify.c. */
8669 return get_exception_pointer (cfun);
8672 return get_exception_filter (cfun);
8675 /* Function descriptors are not valid except as
8676 initialization constants, and should not be expanded. */
8684 expand_label (TREE_OPERAND (exp, 0));
8688 expand_asm_expr (exp);
8691 case WITH_SIZE_EXPR:
8692 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8693 have pulled out the size to use in whatever context it needed. */
8694 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8697 case REALIGN_LOAD_EXPR:
8699 tree oprnd0 = TREE_OPERAND (exp, 0);
8700 tree oprnd1 = TREE_OPERAND (exp, 1);
8701 tree oprnd2 = TREE_OPERAND (exp, 2);
8704 this_optab = optab_for_tree_code (code, type);
8705 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8706 op2 = expand_normal (oprnd2);
8707 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8715 tree oprnd0 = TREE_OPERAND (exp, 0);
8716 tree oprnd1 = TREE_OPERAND (exp, 1);
8717 tree oprnd2 = TREE_OPERAND (exp, 2);
8720 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8721 op2 = expand_normal (oprnd2);
8722 target = expand_widen_pattern_expr (exp, op0, op1, op2,
8727 case WIDEN_SUM_EXPR:
8729 tree oprnd0 = TREE_OPERAND (exp, 0);
8730 tree oprnd1 = TREE_OPERAND (exp, 1);
8732 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8733 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
8738 case REDUC_MAX_EXPR:
8739 case REDUC_MIN_EXPR:
8740 case REDUC_PLUS_EXPR:
8742 op0 = expand_normal (TREE_OPERAND (exp, 0));
8743 this_optab = optab_for_tree_code (code, type);
8744 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8749 case VEC_EXTRACT_EVEN_EXPR:
8750 case VEC_EXTRACT_ODD_EXPR:
8752 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8753 NULL_RTX, &op0, &op1, 0);
8754 this_optab = optab_for_tree_code (code, type);
8755 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8761 case VEC_INTERLEAVE_HIGH_EXPR:
8762 case VEC_INTERLEAVE_LOW_EXPR:
8764 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8765 NULL_RTX, &op0, &op1, 0);
8766 this_optab = optab_for_tree_code (code, type);
8767 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8773 case VEC_LSHIFT_EXPR:
8774 case VEC_RSHIFT_EXPR:
8776 target = expand_vec_shift_expr (exp, target);
8780 case VEC_UNPACK_HI_EXPR:
8781 case VEC_UNPACK_LO_EXPR:
8783 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8784 this_optab = optab_for_tree_code (code, type);
8785 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
8791 case VEC_WIDEN_MULT_HI_EXPR:
8792 case VEC_WIDEN_MULT_LO_EXPR:
8794 tree oprnd0 = TREE_OPERAND (exp, 0);
8795 tree oprnd1 = TREE_OPERAND (exp, 1);
8797 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8798 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
8800 gcc_assert (target);
8804 case VEC_PACK_MOD_EXPR:
8805 case VEC_PACK_SAT_EXPR:
8807 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8812 return lang_hooks.expand_expr (exp, original_target, tmode,
8816 /* Here to do an ordinary binary operator. */
8818 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8819 subtarget, &op0, &op1, 0);
8821 this_optab = optab_for_tree_code (code, type);
8823 if (modifier == EXPAND_STACK_PARM)
8825 temp = expand_binop (mode, this_optab, op0, op1, target,
8826 unsignedp, OPTAB_LIB_WIDEN);
8828 return REDUCE_BIT_FIELD (temp);
8830 #undef REDUCE_BIT_FIELD
8832 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8833 signedness of TYPE), possibly returning the result in TARGET. */
8835 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8837 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8838 if (target && GET_MODE (target) != GET_MODE (exp))
8840 if (TYPE_UNSIGNED (type))
8843 if (prec < HOST_BITS_PER_WIDE_INT)
8844 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8847 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8848 ((unsigned HOST_WIDE_INT) 1
8849 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8851 return expand_and (GET_MODE (exp), exp, mask, target);
8855 tree count = build_int_cst (NULL_TREE,
8856 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8857 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8858 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
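/* A worked example: reducing to a 5-bit field in SImode masks with
   (1 << 5) - 1 == 0x1f in the unsigned case, while the signed case
   shifts left by 32 - 5 = 27 bits and then arithmetic-shifts right by
   27 to replicate the field's sign bit. */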
8862 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8863 when applied to the address of EXP produces an address known to be
8864 aligned more than BIGGEST_ALIGNMENT. */
8867 is_aligning_offset (tree offset, tree exp)
8869 /* Strip off any conversions. */
8870 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8871 || TREE_CODE (offset) == NOP_EXPR
8872 || TREE_CODE (offset) == CONVERT_EXPR)
8873 offset = TREE_OPERAND (offset, 0);
8875 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8876 a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT. */
8877 if (TREE_CODE (offset) != BIT_AND_EXPR
8878 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8879 || compare_tree_int (TREE_OPERAND (offset, 1),
8880 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8881 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8884 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8885 It must be NEGATE_EXPR. Then strip any more conversions. */
8886 offset = TREE_OPERAND (offset, 0);
8887 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8888 || TREE_CODE (offset) == NOP_EXPR
8889 || TREE_CODE (offset) == CONVERT_EXPR)
8890 offset = TREE_OPERAND (offset, 0);
8892 if (TREE_CODE (offset) != NEGATE_EXPR)
8895 offset = TREE_OPERAND (offset, 0);
8896 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8897 || TREE_CODE (offset) == NOP_EXPR
8898 || TREE_CODE (offset) == CONVERT_EXPR)
8899 offset = TREE_OPERAND (offset, 0);
8901 /* This must now be the address of EXP. */
8902 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
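/* The recognized form is essentially

     ((- (some_int_type) &exp) & (ALIGN - 1))

   where ALIGN is a power of 2 greater than BIGGEST_ALIGNMENT /
   BITS_PER_UNIT; adding such an offset to the address of EXP rounds
   it up to a multiple of ALIGN. */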
8905 /* Return the tree node if ARG corresponds to a string constant, or zero
8906 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8907 in bytes within the string that ARG is accessing. The type of the
8908 offset will be `sizetype'. */
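/* For example, given ARG == &"hello"[2], the STRING_CST for "hello"
   is returned and *PTR_OFFSET is set to 2. */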
8911 string_constant (tree arg, tree *ptr_offset)
8916 if (TREE_CODE (arg) == ADDR_EXPR)
8918 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8920 *ptr_offset = size_zero_node;
8921 return TREE_OPERAND (arg, 0);
8923 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8925 array = TREE_OPERAND (arg, 0);
8926 offset = size_zero_node;
8928 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8930 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8931 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8932 if (TREE_CODE (array) != STRING_CST
8933 && TREE_CODE (array) != VAR_DECL)
8939 else if (TREE_CODE (arg) == PLUS_EXPR)
8941 tree arg0 = TREE_OPERAND (arg, 0);
8942 tree arg1 = TREE_OPERAND (arg, 1);
8947 if (TREE_CODE (arg0) == ADDR_EXPR
8948 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8949 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8951 array = TREE_OPERAND (arg0, 0);
8954 else if (TREE_CODE (arg1) == ADDR_EXPR
8955 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8956 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8958 array = TREE_OPERAND (arg1, 0);
8967 if (TREE_CODE (array) == STRING_CST)
8969 *ptr_offset = fold_convert (sizetype, offset);
8972 else if (TREE_CODE (array) == VAR_DECL)
8976 /* Variables initialized to string literals can be handled too. */
8977 if (DECL_INITIAL (array) == NULL_TREE
8978 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8982 /* Accept only arrays that are read-only, non-volatile and bind locally. */
8982 if (! TREE_READONLY (array)
8983 || TREE_SIDE_EFFECTS (array)
8984 || ! targetm.binds_local_p (array))
8987 /* Avoid const char foo[4] = "abcde"; */
8988 if (DECL_SIZE_UNIT (array) == NULL_TREE
8989 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8990 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8991 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8994 /* If variable is bigger than the string literal, OFFSET must be constant
8995 and within the bounds of the string literal. */
8996 offset = fold_convert (sizetype, offset);
8997 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8998 && (! host_integerp (offset, 1)
8999 || compare_tree_int (offset, length) >= 0))
9002 *ptr_offset = offset;
9003 return DECL_INITIAL (array);
9009 /* Generate code to calculate EXP using a store-flag instruction
9010 and return an rtx for the result. EXP is either a comparison
9011 or a TRUTH_NOT_EXPR whose operand is a comparison.
9013 If TARGET is nonzero, store the result there if convenient.
9015 If ONLY_CHEAP is nonzero, only do this if it is likely to be very cheap.
9018 Return zero if there is no suitable set-flag instruction
9019 available on this machine.
9021 Once expand_expr has been called on the arguments of the comparison,
9022 we are committed to doing the store flag, since it is not safe to
9023 re-evaluate the expression. We emit the store-flag insn by calling
9024 emit_store_flag, but only expand the arguments if we have a reason
9025 to believe that emit_store_flag will be successful. If we think that
9026 it will, but it isn't, we have to simulate the store-flag with a
9027 set/jump/set sequence. */
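/* For example, on a machine with a set-on-condition (scc)
   instruction, "x > y" becomes a single insn storing 0 or 1 into
   TARGET; the fallback at the end of this function instead emits the
   sequence "target = 1; if (cond) goto L; target = 0; L:" (with the
   constants swapped when INVERT is set). */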
9030 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9033 tree arg0, arg1, type;
9035 enum machine_mode operand_mode;
9039 enum insn_code icode;
9040 rtx subtarget = target;
9043 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9044 result at the end. We can't simply invert the test since it would
9045 have already been inverted if it were valid. This case occurs for
9046 some floating-point comparisons. */
9048 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9049 invert = 1, exp = TREE_OPERAND (exp, 0);
9051 arg0 = TREE_OPERAND (exp, 0);
9052 arg1 = TREE_OPERAND (exp, 1);
9054 /* Don't crash if the comparison was erroneous. */
9055 if (arg0 == error_mark_node || arg1 == error_mark_node)
9058 type = TREE_TYPE (arg0);
9059 operand_mode = TYPE_MODE (type);
9060 unsignedp = TYPE_UNSIGNED (type);
9062 /* We won't bother with BLKmode store-flag operations because it would mean
9063 passing a lot of information to emit_store_flag. */
9064 if (operand_mode == BLKmode)
9067 /* We won't bother with store-flag operations involving function pointers
9068 when function pointers must be canonicalized before comparisons. */
9069 #ifdef HAVE_canonicalize_funcptr_for_compare
9070 if (HAVE_canonicalize_funcptr_for_compare
9071 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9072 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9074 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9075 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9076 == FUNCTION_TYPE))))
9083 /* Get the rtx comparison code to use. We know that EXP is a comparison
9084 operation of some type. Some comparisons against 1 and -1 can be
9085 converted to comparisons with zero. Do so here so that the tests
9086 below will be aware that we have a comparison with zero. These
9087 tests will not catch constants in the first operand, but constants
9088 are rarely passed as the first operand. */
9090 switch (TREE_CODE (exp))
9099 if (integer_onep (arg1))
9100 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9102 code = unsignedp ? LTU : LT;
9105 if (! unsignedp && integer_all_onesp (arg1))
9106 arg1 = integer_zero_node, code = LT;
9108 code = unsignedp ? LEU : LE;
9111 if (! unsignedp && integer_all_onesp (arg1))
9112 arg1 = integer_zero_node, code = GE;
9114 code = unsignedp ? GTU : GT;
9117 if (integer_onep (arg1))
9118 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9120 code = unsignedp ? GEU : GE;
9123 case UNORDERED_EXPR:
9152 /* Put a constant second. */
9153 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9155 tem = arg0; arg0 = arg1; arg1 = tem;
9156 code = swap_condition (code);
9159 /* If this is an equality or inequality test of a single bit, we can
9160 do this by shifting the bit being tested to the low-order bit and
9161 masking the result with the constant 1. If the condition was EQ,
9162 we xor it with 1. This does not require an scc insn and is faster
9163 than an scc insn even if we have it.
9165 The code to make this transformation was moved into fold_single_bit_test,
9166 so we just call into the folder and expand its result. */
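/* E.g. "(x & 8) != 0" is folded to "(x >> 3) & 1", and the EQ variant
   gets a final xor with 1. */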
9168 if ((code == NE || code == EQ)
9169 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9170 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9172 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9173 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9175 target, VOIDmode, EXPAND_NORMAL);
9178 /* Now see if we are likely to be able to do this. Return if not. */
9179 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9182 icode = setcc_gen_code[(int) code];
9184 if (icode == CODE_FOR_nothing)
9186 enum machine_mode wmode;
9188 for (wmode = operand_mode;
9189 icode == CODE_FOR_nothing && wmode != VOIDmode;
9190 wmode = GET_MODE_WIDER_MODE (wmode))
9191 icode = cstore_optab->handlers[(int) wmode].insn_code;
9194 if (icode == CODE_FOR_nothing
9195 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9197 /* We can only do this if it is one of the special cases that
9198 can be handled without an scc insn. */
9199 if ((code == LT && integer_zerop (arg1))
9200 || (! only_cheap && code == GE && integer_zerop (arg1)))
9202 else if (! only_cheap && (code == NE || code == EQ)
9203 && TREE_CODE (type) != REAL_TYPE
9204 && ((abs_optab->handlers[(int) operand_mode].insn_code
9205 != CODE_FOR_nothing)
9206 || (ffs_optab->handlers[(int) operand_mode].insn_code
9207 != CODE_FOR_nothing)))
9213 if (! get_subtarget (target)
9214 || GET_MODE (subtarget) != operand_mode)
9217 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9220 target = gen_reg_rtx (mode);
9222 result = emit_store_flag (target, code, op0, op1,
9223 operand_mode, unsignedp, 1);
9228 result = expand_binop (mode, xor_optab, result, const1_rtx,
9229 result, 0, OPTAB_LIB_WIDEN);
9233 /* If this failed, we have to do this with set/compare/jump/set code. */
9235 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9236 target = gen_reg_rtx (GET_MODE (target));
9238 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9239 label = gen_label_rtx ();
9240 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9243 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9250 /* Stubs in case we haven't got a casesi insn. */
9252 # define HAVE_casesi 0
9253 # define gen_casesi(a, b, c, d, e) (0)
9254 # define CODE_FOR_casesi CODE_FOR_nothing
9257 /* If the machine does not have a case insn that compares the bounds,
9258 this means extra overhead for dispatch tables, which raises the
9259 threshold for using them. */
9260 #ifndef CASE_VALUES_THRESHOLD
9261 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9262 #endif /* CASE_VALUES_THRESHOLD */
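/* That is, by default a switch needs at least 4 case values (5 when
   there is no casesi pattern) before a dispatch table is preferred
   over a chain of compares. */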
9265 case_values_threshold (void)
9267 return CASE_VALUES_THRESHOLD;
9270 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9271 0 otherwise (i.e. if there is no casesi instruction). */
9273 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9274 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9276 enum machine_mode index_mode = SImode;
9277 int index_bits = GET_MODE_BITSIZE (index_mode);
9278 rtx op1, op2, index;
9279 enum machine_mode op_mode;
9284 /* Convert the index to SImode. */
9285 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9287 enum machine_mode omode = TYPE_MODE (index_type);
9288 rtx rangertx = expand_normal (range);
9290 /* We must handle the endpoints in the original mode. */
9291 index_expr = build2 (MINUS_EXPR, index_type,
9292 index_expr, minval);
9293 minval = integer_zero_node;
9294 index = expand_normal (index_expr);
9295 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9296 omode, 1, default_label);
9297 /* Now we can safely truncate. */
9298 index = convert_to_mode (index_mode, index, 0);
9302 if (TYPE_MODE (index_type) != index_mode)
9304 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9305 index_expr = fold_convert (index_type, index_expr);
9308 index = expand_normal (index_expr);
9311 do_pending_stack_adjust ();
9313 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9314 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9316 index = copy_to_mode_reg (op_mode, index);
9318 op1 = expand_normal (minval);
9320 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9321 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9322 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9323 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9325 op1 = copy_to_mode_reg (op_mode, op1);
9327 op2 = expand_normal (range);
9329 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9330 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9331 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9332 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9334 op2 = copy_to_mode_reg (op_mode, op2);
9336 emit_jump_insn (gen_casesi (index, op1, op2,
9337 table_label, default_label));
9341 /* Attempt to generate a tablejump instruction; same concept. */
9342 #ifndef HAVE_tablejump
9343 #define HAVE_tablejump 0
9344 #define gen_tablejump(x, y) (0)
9347 /* Subroutine of the next function.
9349 INDEX is the value being switched on, with the lowest value
9350 in the table already subtracted.
9351 MODE is its expected mode (needed if INDEX is constant).
9352 RANGE is the length of the jump table.
9353 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9355 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9356 index value is out of range. */
9359 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9364 if (INTVAL (range) > cfun->max_jumptable_ents)
9365 cfun->max_jumptable_ents = INTVAL (range);
9367 /* Do an unsigned comparison (in the proper mode) between the index
9368 expression and the value which represents the length of the range.
9369 Since we just finished subtracting the lower bound of the range
9370 from the index expression, this comparison allows us to simultaneously
9371 check that the original index expression value is both greater than
9372 or equal to the minimum value of the range and less than or equal to
9373 the maximum value of the range. */
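/* Concretely: if the original index was below the minimum, the
   subtraction wrapped around to a large unsigned value, so the single
   unsigned GTU test rejects values on either side of the range. */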
9375 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9378 /* If index is in range, it must fit in Pmode.
9379 Convert to Pmode so we can index with it. */
9381 index = convert_to_mode (Pmode, index, 1);
9383 /* Don't let a MEM slip through, because then INDEX that comes
9384 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9385 and break_out_memory_refs will go to work on it and mess it up. */
9386 #ifdef PIC_CASE_VECTOR_ADDRESS
9387 if (flag_pic && !REG_P (index))
9388 index = copy_to_mode_reg (Pmode, index);
9391 /* If flag_force_addr were to affect this address
9392 it could interfere with the tricky assumptions made
9393 about addresses that contain label-refs,
9394 which may be valid only very near the tablejump itself. */
9395 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9396 GET_MODE_SIZE, because this indicates how large insns are. The other
9397 uses should all be Pmode, because they are addresses. This code
9398 could fail if addresses and insns are not the same size. */
9399 index = gen_rtx_PLUS (Pmode,
9400 gen_rtx_MULT (Pmode, index,
9401 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9402 gen_rtx_LABEL_REF (Pmode, table_label));
9403 #ifdef PIC_CASE_VECTOR_ADDRESS
9405 index = PIC_CASE_VECTOR_ADDRESS (index);
9408 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9409 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9410 vector = gen_const_mem (CASE_VECTOR_MODE, index);
9411 convert_move (temp, vector, 0);
9413 emit_jump_insn (gen_tablejump (temp, table_label));
9415 /* If we are generating PIC code or if the table is PC-relative, the
9416 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9417 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9422 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9423 rtx table_label, rtx default_label)
9427 if (! HAVE_tablejump)
9430 index_expr = fold_build2 (MINUS_EXPR, index_type,
9431 fold_convert (index_type, index_expr),
9432 fold_convert (index_type, minval));
9433 index = expand_normal (index_expr);
9434 do_pending_stack_adjust ();
9436 do_tablejump (index, TYPE_MODE (index_type),
9437 convert_modes (TYPE_MODE (index_type),
9438 TYPE_MODE (TREE_TYPE (range)),
9439 expand_normal (range),
9440 TYPE_UNSIGNED (TREE_TYPE (range))),
9441 table_label, default_label);
9445 /* Nonzero if the mode is a valid vector mode for this architecture.
9446 This returns nonzero even if there is no hardware support for the
9447 vector mode, but we can emulate with narrower modes. */
9450 vector_mode_valid_p (enum machine_mode mode)
9452 enum mode_class class = GET_MODE_CLASS (mode);
9453 enum machine_mode innermode;
9455 /* Doh! What's going on? */
9456 if (class != MODE_VECTOR_INT
9457 && class != MODE_VECTOR_FLOAT)
9460 /* Hardware support. Woo hoo! */
9461 if (targetm.vector_mode_supported_p (mode))
9464 innermode = GET_MODE_INNER (mode);
9466 /* We should probably return 1 if requesting V4DI and we have no DI
9467 but do have V2DI; however, that case is very unlikely. */
9469 /* If we have support for the inner mode, we can safely emulate it.
9470 We may not have V2DI, but we can emulate it with a pair of DIs. */
9471 return targetm.scalar_mode_supported_p (innermode);
9474 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9476 const_vector_from_tree (tree exp)
9481 enum machine_mode inner, mode;
9483 mode = TYPE_MODE (TREE_TYPE (exp));
9485 if (initializer_zerop (exp))
9486 return CONST0_RTX (mode);
9488 units = GET_MODE_NUNITS (mode);
9489 inner = GET_MODE_INNER (mode);
9491 v = rtvec_alloc (units);
9493 link = TREE_VECTOR_CST_ELTS (exp);
9494 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9496 elt = TREE_VALUE (link);
9498 if (TREE_CODE (elt) == REAL_CST)
9499 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9502 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9503 TREE_INT_CST_HIGH (elt),
9507 /* Initialize remaining elements to 0. */
9508 for (; i < units; ++i)
9509 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9511 return gen_rtx_CONST_VECTOR (mode, v);
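/* For example, a VECTOR_CST {1, 2, 3, 4} of a V4SI type becomes
   (const_vector:V4SI [1 2 3 4]); any trailing elements missing from
   the constant's list are filled with zeros above. */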
9513 #include "gt-expr.h"