1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
24 #include "coretypes.h"
32 #include "hard-reg-set.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
44 #include "typeclass.h"
47 #include "langhooks.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
56 /* Decide whether a function's arguments should be processed
57 from first to last or from last to first.
59 They should if the stack and args grow in opposite directions, but
60 only if we have push insns. */
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
81 /* If this is nonzero, we do not bother generating VOLATILE
82 around volatile memory references, and we are willing to
83 output indirect addresses. If cse is to follow, we reject
84 indirect addresses so a useful potential cse is generated;
85 if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
100 int explicit_inc_from;
101 unsigned HOST_WIDE_INT len;
102 HOST_WIDE_INT offset;
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
109 struct store_by_pieces
115 unsigned HOST_WIDE_INT len;
116 HOST_WIDE_INT offset;
117 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
122 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
125 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
126 struct move_by_pieces *);
127 static bool block_move_libcall_safe_for_call_parm (void);
128 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
129 static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
130 static tree emit_block_move_libcall_fn (int);
131 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
132 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
133 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
134 static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
135 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
136 struct store_by_pieces *);
137 static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
138 static rtx clear_storage_via_libcall (rtx, rtx);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 HOST_WIDE_INT, enum machine_mode,
144 tree, tree, int, int);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
149 static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
150 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
152 static int is_aligning_offset (tree, tree);
153 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
154 enum expand_modifier);
155 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
156 static rtx do_store_flag (tree, rtx, enum machine_mode, int);
158 static void emit_single_push_insn (enum machine_mode, rtx, tree);
160 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
161 static rtx const_vector_from_tree (tree);
163 /* Record for each mode whether we can move a register directly to or
164 from an object of that mode in memory. If we can't, we won't try
165 to use that mode directly when accessing a field of that mode. */
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
170 /* Record for each mode whether we can float-extend from memory. */
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
174 /* This macro is used to determine whether move_by_pieces should be called
175 to perform a structure copy. */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178 (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
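/* For example, with MOVE_RATIO == 3 and full alignment, a 16-byte copy
   on a 64-bit target needs only two DImode moves, so the test succeeds;
   the same copy with byte alignment on a strict-alignment target needs
   sixteen QImode moves and fails it.  (Illustrative figures: the actual
   outcome depends on the target's MOVE_MAX_PIECES and MOVE_RATIO.)  */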
182 /* This macro is used to determine whether clear_by_pieces should be
183 called to clear storage. */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif
190 /* This macro is used to determine whether store_by_pieces should be
191 called to "memset" storage with byte values other than zero, or
192 to "memcpy" storage when the source is a constant string. */
193 #ifndef STORE_BY_PIECES_P
194 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
195 (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
199 /* This array records the insn_code of insns to perform block moves. */
200 enum insn_code movmem_optab[NUM_MACHINE_MODES];
202 /* This array records the insn_code of insns to perform block clears. */
203 enum insn_code clrmem_optab[NUM_MACHINE_MODES];
205 /* These arrays record the insn_code of two different kinds of insns
206 to perform block compares. */
207 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
208 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
210 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
212 #ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
216 /* This is run once per compilation to set up which modes can be used
217 directly in memory and to initialize the block move optab. */
220 init_expr_once (void)
223 enum machine_mode mode;
228 /* Try indexing by frame ptr and try by stack ptr.
229 It is known that on the Convex the stack ptr isn't a valid index.
230 With luck, one or the other is valid on any machine. */
231 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
232 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
234 /* A scratch register we can modify in-place below to avoid
235 useless RTL allocations. */
236 reg = gen_rtx_REG (VOIDmode, -1);
238 insn = rtx_alloc (INSN);
239 pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
240 PATTERN (insn) = pat;
242 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
243 mode = (enum machine_mode) ((int) mode + 1))
247 direct_load[(int) mode] = direct_store[(int) mode] = 0;
248 PUT_MODE (mem, mode);
249 PUT_MODE (mem1, mode);
250 PUT_MODE (reg, mode);
252 /* See if there is some register that can be used in this mode and
253 directly loaded or stored from memory. */
255 if (mode != VOIDmode && mode != BLKmode)
256 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
267 if (recog (pat, insn, &num_clobbers) >= 0)
268 direct_load[(int) mode] = 1;
270 SET_SRC (pat) = mem1;
271 SET_DEST (pat) = reg;
272 if (recog (pat, insn, &num_clobbers) >= 0)
273 direct_load[(int) mode] = 1;
	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
277 if (recog (pat, insn, &num_clobbers) >= 0)
278 direct_store[(int) mode] = 1;
	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
282 if (recog (pat, insn, &num_clobbers) >= 0)
283 direct_store[(int) mode] = 1;
287 mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
289 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
290 mode = GET_MODE_WIDER_MODE (mode))
292 enum machine_mode srcmode;
293 for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
294 srcmode = GET_MODE_WIDER_MODE (srcmode))
298 ic = can_extend_p (mode, srcmode, 0);
299 if (ic == CODE_FOR_nothing)
302 PUT_MODE (mem, srcmode);
304 if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
305 float_extend_from_mem[mode][srcmode] = true;
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
318 /* Copy data from FROM to TO, where the machine modes are not the same.
319 Both modes may be integer, or both may be floating.
320 UNSIGNEDP should be nonzero if FROM is an unsigned type.
321 This causes zero-extension instead of sign-extension. */
324 convert_move (rtx to, rtx from, int unsignedp)
326 enum machine_mode to_mode = GET_MODE (to);
327 enum machine_mode from_mode = GET_MODE (from);
328 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
329 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
333 /* rtx code for making an equivalent value. */
334 enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
335 : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
338 gcc_assert (to_real == from_real);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;
345 /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */
349 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
350 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
351 >= GET_MODE_SIZE (to_mode))
352 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
353 from = gen_lowpart (to_mode, from), from_mode = to_mode;
355 gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
357 if (to_mode == from_mode
358 || (from_mode == VOIDmode && CONSTANT_P (from)))
360 emit_move_insn (to, from);
364 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
366 gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
368 if (VECTOR_MODE_P (to_mode))
369 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
371 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
373 emit_move_insn (to, from);
377 if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
379 convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
380 convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
389 gcc_assert (GET_MODE_PRECISION (from_mode)
390 != GET_MODE_PRECISION (to_mode));
      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;
397 /* Try converting directly if the insn is supported. */
399 code = tab->handlers[to_mode][from_mode].insn_code;
400 if (code != CODE_FOR_nothing)
402 emit_unop_insn (code, to, from,
403 tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
407 /* Otherwise use a libcall. */
408 libcall = tab->handlers[to_mode][from_mode].libfunc;
410 /* Is this conversion implemented yet? */
411 gcc_assert (libcall);
      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
418 emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
425 /* Handle pointer conversion. */ /* SPEE 900220. */
426 /* Targets are expected to provide conversion insns between PxImode and
427 xImode for all MODE_PARTIAL_INT modes they use, but no others. */
428 if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
430 enum machine_mode full_mode
431 = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
433 gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
434 != CODE_FOR_nothing);
436 if (full_mode != from_mode)
437 from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
442 if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
444 enum machine_mode full_mode
445 = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
447 gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
448 != CODE_FOR_nothing);
      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);

      if (to_mode == full_mode)
	return;
455 /* else proceed to integer conversions below. */
456 from_mode = full_mode;
459 /* Now both modes are integers. */
461 /* Handle expanding beyond a word. */
462 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
463 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
470 enum machine_mode lowpart_mode;
471 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
473 /* Try converting directly if the insn is supported. */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
477 /* If FROM is a SUBREG, put it into a register. Do this
478 so that we always generate the same set of insns for
479 better cse'ing; if an intermediate assignment occurred,
480 we won't be doing the operation directly on the SUBREG. */
481 if (optimize > 0 && GET_CODE (from) == SUBREG)
482 from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
486 /* Next, try converting via full word. */
487 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
488 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
489 != CODE_FOR_nothing))
493 if (reg_overlap_mentioned_p (to, from))
494 from = force_reg (from_mode, from);
495 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
497 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
498 emit_unop_insn (code, to,
499 gen_lowpart (word_mode, to), equiv_code);
503 /* No special multiword conversion insn; do it by hand. */
506 /* Since we will turn this into a no conflict block, we must ensure
507 that the source does not overlap the target. */
509 if (reg_overlap_mentioned_p (to, from))
510 from = force_reg (from_mode, from);
512 /* Get a copy of FROM widened to a word, if necessary. */
513 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
514 lowpart_mode = word_mode;
516 lowpart_mode = from_mode;
518 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
520 lowpart = gen_lowpart (lowpart_mode, to);
521 emit_move_insn (lowpart, lowfrom);
523 /* Compute the value to put in each remaining word. */
525 fill_value = const0_rtx;
530 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
531 && STORE_FLAG_VALUE == -1)
	  emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			 lowpart_mode, 0);
535 fill_value = gen_reg_rtx (word_mode);
536 emit_insn (gen_slt (fill_value));
542 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
			  size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
			  NULL_RTX, 0);
545 fill_value = convert_to_mode (word_mode, fill_value, 1);
549 /* Fill the remaining words. */
550 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
552 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
553 rtx subword = operand_subword (to, index, 1, to_mode);
555 gcc_assert (subword);
557 if (fill_value != subword)
558 emit_move_insn (subword, fill_value);
      insns = get_insns ();
      end_sequence ();
564 emit_no_conflict_block (insns, to, from, NULL_RTX,
565 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
569 /* Truncating multi-word to a word or less. */
570 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
571 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
584 /* Now follow all the conversions between integers
585 no more than a word long. */
587 /* For truncation, usually we can just refer to FROM in a narrower mode. */
588 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
589 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
606 /* Handle extension. */
607 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
609 /* Convert directly if that works. */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
621 enum machine_mode intermediate;
625 /* Search for a mode to convert via. */
626 for (intermediate = from_mode; intermediate != VOIDmode;
627 intermediate = GET_MODE_WIDER_MODE (intermediate))
	if (((can_extend_p (to_mode, intermediate, unsignedp)
	      != CODE_FOR_nothing)
630 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
631 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
632 GET_MODE_BITSIZE (intermediate))))
633 && (can_extend_p (intermediate, from_mode, unsignedp)
634 != CODE_FOR_nothing))
	  {
	    convert_move (to, convert_to_mode (intermediate, from,
					       unsignedp), unsignedp);
	    return;
	  }
641 /* No suitable intermediate mode.
642 Generate what we need with shifts. */
643 shift_amount = build_int_cst (NULL_TREE,
644 GET_MODE_BITSIZE (to_mode)
645 - GET_MODE_BITSIZE (from_mode));
646 from = gen_lowpart (to_mode, force_reg (from_mode, from));
      tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			  to, unsignedp);
      tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			  to, unsignedp);
      if (tmp != to)
	emit_move_insn (to, tmp);
      return;
    }
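  /* Worked example for the shift fallback above: extending a QImode
     value to SImode uses shift_amount == 24.  The byte 0x80 becomes
     0x80000000 after the left shift; the right shift (arithmetic when
     UNSIGNEDP is zero) then yields 0xffffff80, the sign-extended
     result.  With UNSIGNEDP nonzero the right shift is logical and
     yields 0x80.  */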
657 /* Support special truncate insns for certain modes. */
658 if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
660 emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
665 /* Handle truncation of volatile memrefs, and so on;
666 the things that couldn't be truncated directly,
667 and for which there was no special instruction.
669 ??? Code above formerly short-circuited this, for most integer
670 mode pairs, with a force_reg in from_mode followed by a recursive
671 call to this routine. Appears always to have been wrong. */
672 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
674 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
675 emit_move_insn (to, temp);
  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
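/* A minimal usage sketch of convert_move (illustrative only, not part
   of the original file): widen a QImode pseudo into a fresh SImode
   pseudo with zero-extension, in the usual expansion environment.

     rtx
     widen_qi_to_si (rtx qi_reg)
     {
       rtx si_reg = gen_reg_rtx (SImode);
       convert_move (si_reg, qi_reg, /*unsignedp=*/1);
       return si_reg;
     }
 */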
683 /* Return an rtx for a value that would result
684 from converting X to mode MODE.
685 Both X and MODE may be floating, or both integer.
686 UNSIGNEDP is nonzero if X is an unsigned value.
687 This can be done by referring to a part of X in place
688 or by copying to a new temporary with conversion. */
691 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
693 return convert_modes (mode, VOIDmode, x, unsignedp);
696 /* Return an rtx for a value that would result
697 from converting X from mode OLDMODE to mode MODE.
698 Both modes may be floating, or both integer.
699 UNSIGNEDP is nonzero if X is an unsigned value.
701 This can be done by referring to a part of X in place
702 or by copying to a new temporary with conversion.
704 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode. */
707 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */
714 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
715 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
716 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
717 x = gen_lowpart (mode, x);
719 if (GET_MODE (x) != VOIDmode)
720 oldmode = GET_MODE (x);
725 /* There is one case that we must handle specially: If we are converting
726 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
727 we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
729 make the high-order word of the constant zero, not all ones. */
731 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
732 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
733 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
735 HOST_WIDE_INT val = INTVAL (x);
737 if (oldmode != VOIDmode
738 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
740 int width = GET_MODE_BITSIZE (oldmode);
742 /* We need to zero extend VAL. */
743 val &= ((HOST_WIDE_INT) 1 << width) - 1;
746 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
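      /* Worked example, assuming a 32-bit HOST_WIDE_INT: converting
	 (const_int -1) taken from SImode to unsigned DImode must yield
	 the pair (low 0xffffffff, high 0).  gen_lowpart would instead
	 sign-extend and set all 64 bits, which is wrong for an
	 unsigned conversion.  */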
749 /* We can do this with a gen_lowpart if both desired and current modes
750 are integer, and this is either a constant integer, a register, or a
751 non-volatile MEM. Except for the constant case where MODE is no
752 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
754 if ((GET_CODE (x) == CONST_INT
755 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
756 || (GET_MODE_CLASS (mode) == MODE_INT
757 && GET_MODE_CLASS (oldmode) == MODE_INT
758 && (GET_CODE (x) == CONST_DOUBLE
759 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
760 && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
761 && direct_load[(int) mode])
763 && (! HARD_REGISTER_P (x)
764 || HARD_REGNO_MODE_OK (REGNO (x), mode))
765 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
766 GET_MODE_BITSIZE (GET_MODE (x)))))))))
768 /* ?? If we don't know OLDMODE, we have to assume here that
769 X does not need sign- or zero-extension. This may not be
770 the case, but it's the best we can do. */
771 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
772 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
774 HOST_WIDE_INT val = INTVAL (x);
775 int width = GET_MODE_BITSIZE (oldmode);
777 /* We must sign or zero-extend in this case. Start by
778 zero-extending, then sign extend if we need to. */
779 val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
782 val |= (HOST_WIDE_INT) (-1) << width;
784 return gen_int_mode (val, mode);
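	  /* Worked example: X is (const_int 255) with OLDMODE == QImode.
	     Masking keeps the low 8 bits, 0xff; since bit 7 is set and
	     UNSIGNEDP is zero, the value sign-extends to -1 before
	     gen_int_mode rebuilds it for MODE.  With UNSIGNEDP nonzero
	     it would stay 255.  */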
787 return gen_lowpart (mode, x);
  /* Converting from an integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
794 gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
795 return simplify_gen_subreg (mode, x, oldmode, 0);
798 temp = gen_reg_rtx (mode);
799 convert_move (temp, x, unsignedp);
803 /* STORE_MAX_PIECES is the number of bytes at a time that we can
804 store efficiently. Due to internal GCC limitations, this is
805 MOVE_MAX_PIECES limited by the number of bytes GCC can represent
806 for an immediate constant. */
808 #define STORE_MAX_PIECES MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
810 /* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */
815 can_move_by_pieces (unsigned HOST_WIDE_INT len,
816 unsigned int align ATTRIBUTE_UNUSED)
818 return MOVE_BY_PIECES_P (len, align);
821 /* Generate several move instructions to copy LEN bytes from block FROM to
822 block TO. (These are MEM rtx's with BLKmode).
824 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
825 used to push FROM to the stack.
827 ALIGN is maximum stack alignment we can assume.
829 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
834 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
835 unsigned int align, int endp)
837 struct move_by_pieces data;
838 rtx to_addr, from_addr = XEXP (from, 0);
839 unsigned int max_size = MOVE_MAX_PIECES + 1;
840 enum machine_mode mode = VOIDmode, tmode;
841 enum insn_code icode;
843 align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
846 data.from_addr = from_addr;
849 to_addr = XEXP (to, 0);
852 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
853 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
855 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
862 #ifdef STACK_GROWS_DOWNWARD
868 data.to_addr = to_addr;
871 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
872 || GET_CODE (from_addr) == POST_INC
873 || GET_CODE (from_addr) == POST_DEC);
875 data.explicit_inc_from = 0;
876 data.explicit_inc_to = 0;
877 if (data.reverse) data.offset = len;
880 /* If copying requires more than two move insns,
881 copy addresses to registers (to make displacements shorter)
882 and use post-increment if available. */
883 if (!(data.autinc_from && data.autinc_to)
884 && move_by_pieces_ninsns (len, align, max_size) > 2)
886 /* Find the mode of the largest move... */
887 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
888 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
      if (GET_MODE_SIZE (tmode) < max_size)
	mode = tmode;
892 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
894 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
895 data.autinc_from = 1;
896 data.explicit_inc_from = -1;
898 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
900 data.from_addr = copy_addr_to_reg (from_addr);
901 data.autinc_from = 1;
902 data.explicit_inc_from = 1;
904 if (!data.autinc_from && CONSTANT_P (from_addr))
905 data.from_addr = copy_addr_to_reg (from_addr);
906 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
908 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
910 data.explicit_inc_to = -1;
912 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
914 data.to_addr = copy_addr_to_reg (to_addr);
916 data.explicit_inc_to = 1;
918 if (!data.autinc_to && CONSTANT_P (to_addr))
919 data.to_addr = copy_addr_to_reg (to_addr);
922 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
923 if (align >= GET_MODE_ALIGNMENT (tmode))
924 align = GET_MODE_ALIGNMENT (tmode);
927 enum machine_mode xmode;
929 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
931 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
932 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
933 || SLOW_UNALIGNED_ACCESS (tmode, align))
936 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
939 /* First move what we can in the largest integer mode, then go to
940 successively smaller modes. */
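  /* For instance, a 13-byte copy on a 64-bit target is scheduled as one
     DImode move (8 bytes), one SImode move (4), and one QImode move (1);
     each iteration below picks the widest mode still no larger than the
     remaining length.  (Illustrative.)  */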
944 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
945 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
      if (GET_MODE_SIZE (tmode) < max_size)
	mode = tmode;

      if (mode == VOIDmode)
	break;
952 icode = mov_optab->handlers[(int) mode].insn_code;
953 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
954 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
956 max_size = GET_MODE_SIZE (mode);
959 /* The code above should have handled everything. */
960 gcc_assert (!data.len);
966 gcc_assert (!data.reverse);
971 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
972 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	    data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
							    data.offset - 1));

	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset - 1);
984 to1 = adjust_address (data.to, QImode, data.offset);
992 /* Return number of insns required to move L bytes by pieces.
993 ALIGN (in bits) is maximum alignment we can assume. */
995 static unsigned HOST_WIDE_INT
996 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
997 unsigned int max_size)
999 unsigned HOST_WIDE_INT n_insns = 0;
1000 enum machine_mode tmode;
1002 tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1003 if (align >= GET_MODE_ALIGNMENT (tmode))
1004 align = GET_MODE_ALIGNMENT (tmode);
1007 enum machine_mode tmode, xmode;
1009 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1011 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1012 if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1013 || SLOW_UNALIGNED_ACCESS (tmode, align))
1016 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1019 while (max_size > 1)
1021 enum machine_mode mode = VOIDmode;
1022 enum insn_code icode;
1024 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1025 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
      if (GET_MODE_SIZE (tmode) < max_size)
	mode = tmode;

      if (mode == VOIDmode)
	break;
1032 icode = mov_optab->handlers[(int) mode].insn_code;
1033 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1034 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
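/* Worked example: move_by_pieces_ninsns (13, <full alignment>, 9) on a
   64-bit target counts 13/8 = 1 DImode move (5 bytes left), 5/4 = 1
   SImode move (1 left), no HImode move, and 1 QImode move, for 3 insns
   in total.  (Illustrative.)  */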
1043 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1044 with move instructions for mode MODE. GENFUN is the gen_... function
1045 to make a move insn for that mode. DATA has all the other info. */
1048 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1049 struct move_by_pieces *data)
1051 unsigned int size = GET_MODE_SIZE (mode);
1052 rtx to1 = NULL_RTX, from1;
  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;
1061 if (data->autinc_to)
1062 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1065 to1 = adjust_address (data->to, mode, data->offset);
1068 if (data->autinc_from)
1069 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1072 from1 = adjust_address (data->from, mode, data->offset);
1074 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1075 emit_insn (gen_add2_insn (data->to_addr,
1076 GEN_INT (-(HOST_WIDE_INT)size)));
1077 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1078 emit_insn (gen_add2_insn (data->from_addr,
1079 GEN_INT (-(HOST_WIDE_INT)size)));
1082 emit_insn ((*genfun) (to1, from1));
1085 #ifdef PUSH_ROUNDING
1086 emit_single_push_insn (mode, from1, NULL);
1092 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1093 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1094 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1095 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1097 if (! data->reverse)
1098 data->offset += size;
1104 /* Emit code to move a block Y to a block X. This may be done with
1105 string-move instructions, with multiple scalar move instructions,
1106 or with a library call.
1108 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1109 SIZE is an rtx that says how long they are.
1110 ALIGN is the maximum alignment we can assume they have.
1111 METHOD describes what kind of copy this is, and what mechanisms may be used.
   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
1117 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1125 case BLOCK_OP_NORMAL:
1126 may_use_call = true;
1129 case BLOCK_OP_CALL_PARM:
1130 may_use_call = block_move_libcall_safe_for_call_parm ();
1132 /* Make inhibit_defer_pop nonzero around the library call
1133 to force it to pop the arguments right away. */
1137 case BLOCK_OP_NO_LIBCALL:
1138 may_use_call = false;
1145 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1147 gcc_assert (MEM_P (x));
1148 gcc_assert (MEM_P (y));
1151 /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1152 block copy is more efficient for other large modes, e.g. DCmode. */
1153 x = adjust_address (x, BLKmode, 0);
1154 y = adjust_address (y, BLKmode, 0);
1156 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1157 can be incorrect is coming from __builtin_memcpy. */
1158 if (GET_CODE (size) == CONST_INT)
      if (INTVAL (size) == 0)
	return 0;
1163 x = shallow_copy_rtx (x);
1164 y = shallow_copy_rtx (y);
1165 set_mem_size (x, size);
1166 set_mem_size (y, size);
1169 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1170 move_by_pieces (x, y, INTVAL (size), align, 0);
1171 else if (emit_block_move_via_movmem (x, y, size, align))
1173 else if (may_use_call)
1174 retval = emit_block_move_via_libcall (x, y, size);
1176 emit_block_move_via_loop (x, y, size, align);
  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
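/* A usage sketch (illustrative, not from the original file): copy SIZE
   bytes between two BLKmode MEMs during expansion.

     rtx
     copy_block (rtx dst_mem, rtx src_mem, HOST_WIDE_INT size)
     {
       return emit_block_move (dst_mem, src_mem, GEN_INT (size),
			       BLOCK_OP_NORMAL);
     }

   The result is non-null only when the memcpy libcall path was taken,
   so callers such as __builtin_memcpy expansion must cope with 0.  */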
1184 /* A subroutine of emit_block_move. Returns true if calling the
1185 block move libcall will not clobber any parameters which may have
1186 already been placed on the stack. */
1189 block_move_libcall_safe_for_call_parm (void)
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;
1195 /* If registers go on the stack anyway, any argument is sure to clobber
1196 an outgoing argument. */
1197 #if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
1199 tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
1209 CUMULATIVE_ARGS args_so_far;
1212 fn = emit_block_move_libcall_fn (false);
1213 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1215 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1216 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1218 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1219 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1231 /* A subroutine of emit_block_move. Expand a movmem pattern;
1232 return true if successful. */
1235 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
1237 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1238 int save_volatile_ok = volatile_ok;
1239 enum machine_mode mode;
  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;
1244 /* Try the most limited insn first, because there's no point
1245 including more than one in the machine description unless
1246 the more limited one has some advantage. */
1248 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1249 mode = GET_MODE_WIDER_MODE (mode))
1251 enum insn_code code = movmem_optab[(int) mode];
1252 insn_operand_predicate_fn pred;
1254 if (code != CODE_FOR_nothing
1255 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1256 here because if SIZE is less than the mode mask, as it is
1257 returned by the macro, it will definitely be less than the
1258 actual mode mask. */
1259 && ((GET_CODE (size) == CONST_INT
1260 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1261 <= (GET_MODE_MASK (mode) >> 1)))
1262 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1263 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1264 || (*pred) (x, BLKmode))
1265 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1266 || (*pred) (y, BLKmode))
1267 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1268 || (*pred) (opalign, VOIDmode)))
1271 rtx last = get_last_insn ();
1274 op2 = convert_to_mode (mode, size, 1);
1275 pred = insn_data[(int) code].operand[2].predicate;
1276 if (pred != 0 && ! (*pred) (op2, mode))
1277 op2 = copy_to_mode_reg (mode, op2);
1279 /* ??? When called via emit_block_move_for_call, it'd be
1280 nice if there were some way to inform the backend, so
1281 that it doesn't fail the expansion because it thinks
1282 emitting the libcall would be more efficient. */
	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
  volatile_ok = save_volatile_ok;
  return false;
}
1300 /* A subroutine of emit_block_move. Expand a call to memcpy.
1301 Return the return value from memcpy, 0 otherwise. */
1304 emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
1306 rtx dst_addr, src_addr;
1307 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1308 enum machine_mode size_mode;
1311 /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */
1315 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1316 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1318 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1319 src_addr = convert_memory_address (ptr_mode, src_addr);
1321 dst_tree = make_tree (ptr_type_node, dst_addr);
1322 src_tree = make_tree (ptr_type_node, src_addr);
1324 size_mode = TYPE_MODE (sizetype);
1326 size = convert_to_mode (size_mode, size, 1);
1327 size = copy_to_mode_reg (size_mode, size);
1329 /* It is incorrect to use the libcall calling conventions to call
1330 memcpy in this context. This could be a user call to memcpy and
1331 the user may wish to examine the return value from memcpy. For
1332 targets where libcalls and normal calls have different conventions
1333 for returning pointers, we could end up generating incorrect code. */
1335 size_tree = make_tree (sizetype, size);
1337 fn = emit_block_move_libcall_fn (true);
1338 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1339 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1340 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1342 /* Now we have to build up the CALL_EXPR itself. */
1343 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1344 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1345 call_expr, arg_list, NULL_TREE);
1347 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1352 /* A subroutine of emit_block_move_via_libcall. Create the tree node
1353 for the function we use for block copies. The first time FOR_CALL
1354 is true, we call assemble_external. */
1356 static GTY(()) tree block_move_fn;
1359 init_block_move_fn (const char *asmspec)
1365 fn = get_identifier ("memcpy");
1366 args = build_function_type_list (ptr_type_node, ptr_type_node,
1367 const_ptr_type_node, sizetype,
1370 fn = build_decl (FUNCTION_DECL, fn, args);
1371 DECL_EXTERNAL (fn) = 1;
1372 TREE_PUBLIC (fn) = 1;
1373 DECL_ARTIFICIAL (fn) = 1;
1374 TREE_NOTHROW (fn) = 1;
1380 set_user_assembler_name (block_move_fn, asmspec);
1384 emit_block_move_libcall_fn (int for_call)
1386 static bool emitted_extern;
1389 init_block_move_fn (NULL);
1391 if (for_call && !emitted_extern)
1393 emitted_extern = true;
1394 make_decl_rtl (block_move_fn);
1395 assemble_external (block_move_fn);
1398 return block_move_fn;
1401 /* A subroutine of emit_block_move. Copy the data via an explicit
1402 loop. This is used only when libcalls are forbidden. */
1403 /* ??? It'd be nice to copy in hunks larger than QImode. */
1406 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1407 unsigned int align ATTRIBUTE_UNUSED)
1409 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1410 enum machine_mode iter_mode;
1412 iter_mode = GET_MODE (size);
1413 if (iter_mode == VOIDmode)
1414 iter_mode = word_mode;
1416 top_label = gen_label_rtx ();
1417 cmp_label = gen_label_rtx ();
1418 iter = gen_reg_rtx (iter_mode);
1420 emit_move_insn (iter, const0_rtx);
1422 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1423 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1424 do_pending_stack_adjust ();
1426 emit_jump (cmp_label);
1427 emit_label (top_label);
1429 tmp = convert_modes (Pmode, iter_mode, iter, true);
1430 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
1431 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
1432 x = change_address (x, QImode, x_addr);
1433 y = change_address (y, QImode, y_addr);
1435 emit_move_insn (x, y);
1437 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1438 true, OPTAB_LIB_WIDEN);
1440 emit_move_insn (iter, tmp);
1442 emit_label (cmp_label);
  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
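/* The RTL emitted above amounts to this C loop (illustrative):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;

   in particular a zero SIZE performs no copies at all.  */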
1448 /* Copy all or part of a value X into registers starting at REGNO.
1449 The number of registers to be filled is NREGS. */
1452 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1455 #ifdef HAVE_load_multiple
1463 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1464 x = validize_mem (force_const_mem (mode, x));
1466 /* See if the machine can do this with a load multiple insn. */
1467 #ifdef HAVE_load_multiple
1468 if (HAVE_load_multiple)
1470 last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
1483 for (i = 0; i < nregs; i++)
1484 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1485 operand_subword_force (x, i, mode));
1488 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1489 The number of registers to be filled is NREGS. */
1492 move_block_from_reg (int regno, rtx x, int nregs)
1499 /* See if the machine can do this with a store multiple insn. */
1500 #ifdef HAVE_store_multiple
1501 if (HAVE_store_multiple)
1503 rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
1516 for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1526 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1527 ORIG, where ORIG is a non-consecutive group of registers represented by
1528 a PARALLEL. The clone is identical to the original except in that the
1529 original set of registers is replaced by a new set of pseudo registers.
1530 The new set has the same modes as the original set. */
1533 gen_group_rtx (rtx orig)
1538 gcc_assert (GET_CODE (orig) == PARALLEL);
1540 length = XVECLEN (orig, 0);
1541 tmps = alloca (sizeof (rtx) * length);
1543 /* Skip a NULL entry in first slot. */
1544 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
1551 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1552 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1554 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1557 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
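/* For instance, a value passed in two SImode hard registers might be
   described as (illustrative):

     (parallel [(expr_list (reg:SI 4) (const_int 0))
		(expr_list (reg:SI 5) (const_int 4))])

   gen_group_rtx returns the same shape with (reg:SI 4) and (reg:SI 5)
   replaced by fresh pseudos of the same modes.  */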
1560 /* A subroutine of emit_group_load. Arguments as for emit_group_load,
1561 except that values are placed in TMPS[i], and must later be moved
1562 into corresponding XEXP (XVECEXP (DST, 0, i), 0) element. */
1565 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1569 enum machine_mode m = GET_MODE (orig_src);
1571 gcc_assert (GET_CODE (dst) == PARALLEL);
  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
1575 && !MEM_P (orig_src)
1576 && GET_CODE (orig_src) != CONCAT)
1578 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1579 if (imode == BLKmode)
1580 src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1582 src = gen_reg_rtx (imode);
1583 if (imode != BLKmode)
1584 src = gen_lowpart (GET_MODE (orig_src), src);
1585 emit_move_insn (src, orig_src);
1586 /* ...and back again. */
1587 if (imode != BLKmode)
1588 src = gen_lowpart (imode, src);
1589 emit_group_load_1 (tmps, dst, src, type, ssize);
1593 /* Check for a NULL entry, used to indicate that the parameter goes
1594 both on the stack and in registers. */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;
1600 /* Process the pieces. */
1601 for (i = start; i < XVECLEN (dst, 0); i++)
1603 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1604 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1605 unsigned int bytelen = GET_MODE_SIZE (mode);
1608 /* Handle trailing fragments that run over the size of the struct. */
1609 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1611 /* Arrange to shift the fragment to where it belongs.
1612 extract_bit_field loads to the lsb of the reg. */
1614 #ifdef BLOCK_REG_PADDING
1615 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1616 == (BYTES_BIG_ENDIAN ? upward : downward)
1621 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1622 bytelen = ssize - bytepos;
1623 gcc_assert (bytelen > 0);
1626 /* If we won't be loading directly from memory, protect the real source
1627 from strange tricks we might play; but make sure that the source can
1628 be loaded directly into the destination. */
1630 if (!MEM_P (orig_src)
1631 && (!CONSTANT_P (orig_src)
1632 || (GET_MODE (orig_src) != mode
1633 && GET_MODE (orig_src) != VOIDmode)))
1635 if (GET_MODE (orig_src) == VOIDmode)
1636 src = gen_reg_rtx (mode);
1638 src = gen_reg_rtx (GET_MODE (orig_src));
1640 emit_move_insn (src, orig_src);
1643 /* Optimize the access just a bit. */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1646 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1647 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1648 && bytelen == GET_MODE_SIZE (mode))
1650 tmps[i] = gen_reg_rtx (mode);
1651 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1653 else if (GET_CODE (src) == CONCAT)
1655 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1656 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1658 if ((bytepos == 0 && bytelen == slen0)
1659 || (bytepos != 0 && bytepos + bytelen <= slen))
1661 /* The following assumes that the concatenated objects all
1662 have the same size. In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
1665 tmps[i] = XEXP (src, bytepos / slen0);
1666 if (! CONSTANT_P (tmps[i])
1667 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1668 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1669 (bytepos % slen0) * BITS_PER_UNIT,
1670 1, NULL_RTX, mode, mode);
1676 gcc_assert (!bytepos);
1677 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1678 emit_move_insn (mem, src);
1679 tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1680 0, 1, NULL_RTX, mode, mode);
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  Until we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
1686 else if (VECTOR_MODE_P (GET_MODE (dst))
1689 int slen = GET_MODE_SIZE (GET_MODE (src));
1692 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1693 emit_move_insn (mem, src);
1694 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1696 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1697 && XVECLEN (dst, 0) > 1)
1698 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1699 else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
1703 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1704 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
1713 /* Emit code to move a block SRC of type TYPE to a block DST,
1714 where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */
1719 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1724 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1725 emit_group_load_1 (tmps, dst, src, type, ssize);
1727 /* Copy the extracted pieces into the proper (probable) hard regs. */
1728 for (i = 0; i < XVECLEN (dst, 0); i++)
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
1737 /* Similar, but load SRC into new pseudos in a format that looks like
1738 PARALLEL. This can later be fed to emit_group_move to get things
1739 in the right place. */
1742 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1747 vec = rtvec_alloc (XVECLEN (parallel, 0));
1748 emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1750 /* Convert the vector to look just like the original PARALLEL, except
1751 with the computed values. */
1752 for (i = 0; i < XVECLEN (parallel, 0); i++)
1754 rtx e = XVECEXP (parallel, 0, i);
1755 rtx d = XEXP (e, 0);
1759 d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1760 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1762 RTVEC_ELT (vec, i) = e;
1765 return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1768 /* Emit code to move a block SRC to block DST, where SRC and DST are
1769 non-consecutive groups of registers, each represented by a PARALLEL. */
1772 emit_group_move (rtx dst, rtx src)
1776 gcc_assert (GET_CODE (src) == PARALLEL
1777 && GET_CODE (dst) == PARALLEL
1778 && XVECLEN (src, 0) == XVECLEN (dst, 0));
1780 /* Skip first entry if NULL. */
1781 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1782 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1783 XEXP (XVECEXP (src, 0, i), 0));
1786 /* Move a group of registers represented by a PARALLEL into pseudos. */
1789 emit_group_move_into_temps (rtx src)
1791 rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1794 for (i = 0; i < XVECLEN (src, 0); i++)
1796 rtx e = XVECEXP (src, 0, i);
1797 rtx d = XEXP (e, 0);
1800 e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1801 RTVEC_ELT (vec, i) = e;
1804 return gen_rtx_PARALLEL (GET_MODE (src), vec);
1807 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1808 where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */
1813 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1817 enum machine_mode m = GET_MODE (orig_dst);
1819 gcc_assert (GET_CODE (src) == PARALLEL);
1821 if (!SCALAR_INT_MODE_P (m)
1822 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1824 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1825 if (imode == BLKmode)
1826 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1828 dst = gen_reg_rtx (imode);
1829 emit_group_store (dst, src, type, ssize);
1830 if (imode != BLKmode)
1831 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1832 emit_move_insn (orig_dst, dst);
1836 /* Check for a NULL entry, used to indicate that the parameter goes
1837 both on the stack and in registers. */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;
1843 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
1845 /* Copy the (probable) hard regs into pseudos. */
1846 for (i = start; i < XVECLEN (src, 0); i++)
1848 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1849 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1850 emit_move_insn (tmps[i], reg);
1853 /* If we won't be storing directly into memory, protect the real destination
1854 from strange tricks we might play. */
1856 if (GET_CODE (dst) == PARALLEL)
1860 /* We can get a PARALLEL dst if there is a conditional expression in
1861 a return statement. In that case, the dst and src are the same,
1862 so no action is necessary. */
      if (rtx_equal_p (dst, src))
	return;
1866 /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
1875 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1877 dst = gen_reg_rtx (GET_MODE (orig_dst));
1878 /* Make life a bit easier for combine. */
1879 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
1882 /* Process the pieces. */
1883 for (i = start; i < XVECLEN (src, 0); i++)
1885 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1886 enum machine_mode mode = GET_MODE (tmps[i]);
1887 unsigned int bytelen = GET_MODE_SIZE (mode);
1890 /* Handle trailing fragments that run over the size of the struct. */
1891 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1893 /* store_bit_field always takes its value from the lsb.
1894 Move the fragment to the lsb if it's not already there. */
1896 #ifdef BLOCK_REG_PADDING
1897 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
1898 == (BYTES_BIG_ENDIAN ? upward : downward)
1904 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1905 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
1906 build_int_cst (NULL_TREE, shift),
1909 bytelen = ssize - bytepos;
1912 if (GET_CODE (dst) == CONCAT)
1914 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1915 dest = XEXP (dst, 0);
1916 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1918 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1919 dest = XEXP (dst, 1);
1923 gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1924 dest = assign_stack_temp (GET_MODE (dest),
1925 GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      break;
1933 /* Optimize the access just a bit. */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
1936 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
1937 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1938 && bytelen == GET_MODE_SIZE (mode))
1939 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
1945 /* Copy from the pseudo into the (probable) hard reg. */
1946 if (orig_dst != dst)
1947 emit_move_insn (orig_dst, dst);
1950 /* Generate code to copy a BLKmode object of TYPE out of a
1951 set of registers starting with SRCREG into TGTBLK. If TGTBLK
1952 is null, a stack temporary is created. TGTBLK is returned.
1954 The purpose of this routine is to handle functions that return
1955 BLKmode structures in registers. Some machines (the PA for example)
1956 want to return all small structures in registers regardless of the
1957 structure's alignment. */
1960 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
1962 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
1963 rtx src = NULL, dst = NULL;
1964 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
1965 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
1976 /* This code assumes srcreg is at least a full word. If it isn't, copy it
1977 into a new pseudo which is a full word. */
1979 if (GET_MODE (srcreg) != BLKmode
1980 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
1981 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
1983 /* If the structure doesn't take up a whole number of words, see whether
1984 SRCREG is padded on the left or on the right. If it's on the left,
1985 set PADDING_CORRECTION to the number of bits to skip.
     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
1989 targets and left padding on big-endian targets. The opposite
1990 holds if the structure is returned at the most significant
1991 end of the register. */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
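  /* Worked example: BYTES == 6 on a 32-bit big-endian target gives
     padding_correction = 32 - (6 % 4) * 8 = 16, so the copy loop below
     starts extracting 16 bits into SRCREG's words, skipping the left
     padding.  (Illustrative.)  */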
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
2004 for (bitpos = 0, xbitpos = padding_correction;
2005 bitpos < bytes * BITS_PER_UNIT;
2006 bitpos += bitsize, xbitpos += bitsize)
2008 /* We need a new source operand each time xbitpos is on a
2009 word boundary and when xbitpos == padding_correction
2010 (the first time through). */
2011 if (xbitpos % BITS_PER_WORD == 0
2012 || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));
      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
2018 if (bitpos % BITS_PER_WORD == 0)
2019 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
2023 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2024 extract_bit_field (src, bitsize,
2025 xbitpos % BITS_PER_WORD, 1,
2026 NULL_RTX, word_mode, word_mode));
2032 /* Add a USE expression for REG to the (possibly empty) list pointed
2033 to by CALL_FUSAGE. REG must denote a hard register. */
2036 use_reg (rtx *call_fusage, rtx reg)
2038 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2041 = gen_rtx_EXPR_LIST (VOIDmode,
2042 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2045 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2046 starting at REGNO. All of these registers must be hard registers. */
2049 use_regs (rtx *call_fusage, int regno, int nregs)
2053 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2055 for (i = 0; i < nregs; i++)
2056 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2059 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2060 PARALLEL REGS. This is for calls that pass values in multiple
2061 non-contiguous locations. The Irix 6 ABI has examples of this. */
2064 use_group_regs (rtx *call_fusage, rtx regs)
2068 for (i = 0; i < XVECLEN (regs, 0); i++)
2070 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2072 /* A NULL entry means the parameter goes both on the stack and in
2073 registers. This can also be a MEM for targets that pass values
2074 partially on the stack and partially in registers. */
2075 if (reg != 0 && REG_P (reg))
2076 use_reg (call_fusage, reg);
2081 /* Determine whether the LEN bytes generated by CONSTFUN can be
2082 stored to memory using several move instructions. CONSTFUNDATA is
2083 a pointer which will be passed as argument in every CONSTFUN call.
2084 ALIGN is maximum alignment we can assume. Return nonzero if a
2085 call to store_by_pieces should succeed. */
2088 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2089 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2090 void *constfundata, unsigned int align)
2092 unsigned HOST_WIDE_INT l;
2093 unsigned int max_size;
2094 HOST_WIDE_INT offset = 0;
2095 enum machine_mode mode, tmode;
2096 enum insn_code icode;
2103 if (! STORE_BY_PIECES_P (len, align))
2106 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2107 if (align >= GET_MODE_ALIGNMENT (tmode))
2108 align = GET_MODE_ALIGNMENT (tmode);
2111 enum machine_mode xmode;
2113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2115 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2116 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2117 || SLOW_UNALIGNED_ACCESS (tmode, align))
2120 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
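/* Example of the alignment adjustment above (illustrative): if
   unaligned SImode stores are fast but unaligned DImode stores are
   slow, the loop stops with XMODE == SImode, and ALIGN is treated as
   at least GET_MODE_ALIGNMENT (SImode), letting the sizing loop below
   accept wider modes than the raw alignment would.  */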
2123 /* We would first store what we can in the largest integer mode, then go to
2124 successively smaller modes. */
2127 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2132 max_size = STORE_MAX_PIECES + 1;
2133 while (max_size > 1)
2135 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2136 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2137 if (GET_MODE_SIZE (tmode) < max_size)
2140 if (mode == VOIDmode)
2143 icode = mov_optab->handlers[(int) mode].insn_code;
2144 if (icode != CODE_FOR_nothing
2145 && align >= GET_MODE_ALIGNMENT (mode))
2147 unsigned int size = GET_MODE_SIZE (mode);
2154 cst = (*constfun) (constfundata, offset, mode);
2155 if (!LEGITIMATE_CONSTANT_P (cst))
2165 max_size = GET_MODE_SIZE (mode);
2168 /* The code above should have handled everything. */
2175 /* Generate several move instructions to store LEN bytes generated by
2176 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2177 pointer which will be passed as argument in every CONSTFUN call.
2178 ALIGN is maximum alignment we can assume.
2179 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2180 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2181 stpcpy. */
2184 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2185 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2186 void *constfundata, unsigned int align, int endp)
2188 struct store_by_pieces data;
2192 gcc_assert (endp != 2);
2196 gcc_assert (STORE_BY_PIECES_P (len, align));
2197 data.constfun = constfun;
2198 data.constfundata = constfundata;
2201 store_by_pieces_1 (&data, align);
2206 gcc_assert (!data.reverse);
2211 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2212 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2214 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2217 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2224 to1 = adjust_address (data.to, QImode, data.offset);
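/* Usage sketch (hedged; builtin_strncpy_read_str is one real caller's
   CONSTFUN, but any callback with that signature works):

       dest = store_by_pieces (dest_mem, len, builtin_strncpy_read_str,
                               (void *) src_str, align, endp);

   This both fills DEST_MEM and returns TO itself (ENDP == 0) or an
   address at, or one byte before, the end of the stored bytes.  */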
2232 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2233 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2236 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2238 struct store_by_pieces data;
2243 data.constfun = clear_by_pieces_1;
2244 data.constfundata = NULL;
2247 store_by_pieces_1 (&data, align);
2250 /* Callback routine for clear_by_pieces.
2251 Return const0_rtx unconditionally. */
2254 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2255 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2256 enum machine_mode mode ATTRIBUTE_UNUSED)
2261 /* Subroutine of clear_by_pieces and store_by_pieces.
2262 Generate several move instructions to store LEN bytes of block TO. (A MEM
2263 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2266 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2267 unsigned int align ATTRIBUTE_UNUSED)
2269 rtx to_addr = XEXP (data->to, 0);
2270 unsigned int max_size = STORE_MAX_PIECES + 1;
2271 enum machine_mode mode = VOIDmode, tmode;
2272 enum insn_code icode;
2275 data->to_addr = to_addr;
2277 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2278 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2280 data->explicit_inc_to = 0;
2282 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2284 data->offset = data->len;
2286 /* If storing requires more than two move insns,
2287 copy addresses to registers (to make displacements shorter)
2288 and use post-increment if available. */
2289 if (!data->autinc_to
2290 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2292 /* Determine the main mode we'll be using. */
2293 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2294 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2295 if (GET_MODE_SIZE (tmode) < max_size)
2298 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2300 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2301 data->autinc_to = 1;
2302 data->explicit_inc_to = -1;
2305 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2306 && ! data->autinc_to)
2308 data->to_addr = copy_addr_to_reg (to_addr);
2309 data->autinc_to = 1;
2310 data->explicit_inc_to = 1;
2313 if ( !data->autinc_to && CONSTANT_P (to_addr))
2314 data->to_addr = copy_addr_to_reg (to_addr);
2317 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2318 if (align >= GET_MODE_ALIGNMENT (tmode))
2319 align = GET_MODE_ALIGNMENT (tmode);
2322 enum machine_mode xmode;
2324 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2326 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2327 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2328 || SLOW_UNALIGNED_ACCESS (tmode, align))
2331 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2334 /* First store what we can in the largest integer mode, then go to
2335 successively smaller modes. */
2337 while (max_size > 1)
2339 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2340 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2341 if (GET_MODE_SIZE (tmode) < max_size)
2344 if (mode == VOIDmode)
2347 icode = mov_optab->handlers[(int) mode].insn_code;
2348 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2349 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2351 max_size = GET_MODE_SIZE (mode);
2354 /* The code above should have handled everything. */
2355 gcc_assert (!data->len);
2358 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2359 with move instructions for mode MODE. GENFUN is the gen_... function
2360 to make a move insn for that mode. DATA has all the other info. */
2363 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2364 struct store_by_pieces *data)
2366 unsigned int size = GET_MODE_SIZE (mode);
2369 while (data->len >= size)
2372 data->offset -= size;
2374 if (data->autinc_to)
2375 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2378 to1 = adjust_address (data->to, mode, data->offset);
2380 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2381 emit_insn (gen_add2_insn (data->to_addr,
2382 GEN_INT (-(HOST_WIDE_INT) size)));
2384 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2385 emit_insn ((*genfun) (to1, cst));
2387 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2388 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2390 if (! data->reverse)
2391 data->offset += size;
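/* Per-chunk emission sketch (illustrative RTL; an SImode zero store on
   a post-increment target):

       (set (mem:SI (reg to_addr)) (const_int 0))
       (set (reg to_addr) (plus (reg to_addr) (const_int 4)))

   The explicit add comes from the HAVE_POST_INCREMENT branch above; on
   targets where TO_ADDR was left as a POST_INC rtx, the increment is
   implicit in TO1 instead.  */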
2397 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2398 its length in bytes. */
2401 clear_storage (rtx object, rtx size)
2404 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2405 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2407 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2408 just move a zero. Otherwise, do this a piece at a time. */
2409 if (GET_MODE (object) != BLKmode
2410 && GET_CODE (size) == CONST_INT
2411 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2412 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2415 if (size == const0_rtx)
2417 else if (GET_CODE (size) == CONST_INT
2418 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2419 clear_by_pieces (object, INTVAL (size), align);
2420 else if (clear_storage_via_clrmem (object, size, align))
2423 retval = clear_storage_via_libcall (object, size);
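/* Strategy summary: clear_storage tries, in order, a single CONST0_RTX
   move for non-BLKmode objects, nothing at all for a zero size,
   clear_by_pieces for short constant sizes, a clrmem machine pattern,
   and finally the memset libcall above.  */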
2429 /* A subroutine of clear_storage. Expand a clrmem pattern;
2430 return true if successful. */
2433 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2435 /* Try the most limited insn first, because there's no point
2436 including more than one in the machine description unless
2437 the more limited one has some advantage. */
2439 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2440 enum machine_mode mode;
2442 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2443 mode = GET_MODE_WIDER_MODE (mode))
2445 enum insn_code code = clrmem_optab[(int) mode];
2446 insn_operand_predicate_fn pred;
2448 if (code != CODE_FOR_nothing
2449 /* We don't need MODE to be narrower than
2450 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2451 the mode mask, as it is returned by the macro, it will
2452 definitely be less than the actual mode mask. */
2453 && ((GET_CODE (size) == CONST_INT
2454 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2455 <= (GET_MODE_MASK (mode) >> 1)))
2456 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2457 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2458 || (*pred) (object, BLKmode))
2459 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2460 || (*pred) (opalign, VOIDmode)))
2463 rtx last = get_last_insn ();
2466 op1 = convert_to_mode (mode, size, 1);
2467 pred = insn_data[(int) code].operand[1].predicate;
2468 if (pred != 0 && ! (*pred) (op1, mode))
2469 op1 = copy_to_mode_reg (mode, op1);
2471 pat = GEN_FCN ((int) code) (object, op1, opalign);
2478 delete_insns_since (last);
2485 /* A subroutine of clear_storage. Expand a call to memset.
2486 Return the return value of memset, 0 otherwise. */
2489 clear_storage_via_libcall (rtx object, rtx size)
2491 tree call_expr, arg_list, fn, object_tree, size_tree;
2492 enum machine_mode size_mode;
2495 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2496 place those new pseudos into a VAR_DECL and use them later. */
2498 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2500 size_mode = TYPE_MODE (sizetype);
2501 size = convert_to_mode (size_mode, size, 1);
2502 size = copy_to_mode_reg (size_mode, size);
2504 /* It is incorrect to use the libcall calling conventions to call
2505 memset in this context. This could be a user call to memset and
2506 the user may wish to examine the return value from memset. For
2507 targets where libcalls and normal calls have different conventions
2508 for returning pointers, we could end up generating incorrect code. */
2510 object_tree = make_tree (ptr_type_node, object);
2511 size_tree = make_tree (sizetype, size);
2513 fn = clear_storage_libcall_fn (true);
2514 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2515 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2516 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2518 /* Now we have to build up the CALL_EXPR itself. */
2519 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2520 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2521 call_expr, arg_list, NULL_TREE);
2523 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
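/* The CALL_EXPR expanded above is equivalent to the C source call

       memset (object, 0, size);

   made with the normal user-visible calling conventions, so RETVAL is
   whatever the target's memset actually returns.  */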
2528 /* A subroutine of clear_storage_via_libcall. Create the tree node
2529 for the function we use for block clears. The first time FOR_CALL
2530 is true, we call assemble_external. */
2532 static GTY(()) tree block_clear_fn;
2535 init_block_clear_fn (const char *asmspec)
2537 if (!block_clear_fn)
2541 fn = get_identifier ("memset");
2542 args = build_function_type_list (ptr_type_node, ptr_type_node,
2543 integer_type_node, sizetype,
2546 fn = build_decl (FUNCTION_DECL, fn, args);
2547 DECL_EXTERNAL (fn) = 1;
2548 TREE_PUBLIC (fn) = 1;
2549 DECL_ARTIFICIAL (fn) = 1;
2550 TREE_NOTHROW (fn) = 1;
2552 block_clear_fn = fn;
2556 set_user_assembler_name (block_clear_fn, asmspec);
2560 clear_storage_libcall_fn (int for_call)
2562 static bool emitted_extern;
2564 if (!block_clear_fn)
2565 init_block_clear_fn (NULL);
2567 if (for_call && !emitted_extern)
2569 emitted_extern = true;
2570 make_decl_rtl (block_clear_fn);
2571 assemble_external (block_clear_fn);
2574 return block_clear_fn;
2577 /* Write to one of the components of the complex value CPLX. Write VAL to
2578 the real part if IMAG_P is false, and the imaginary part if it's true. */
2581 write_complex_part (rtx cplx, rtx val, bool imag_p)
2583 enum machine_mode cmode;
2584 enum machine_mode imode;
2587 if (GET_CODE (cplx) == CONCAT)
2589 emit_move_insn (XEXP (cplx, imag_p), val);
2593 cmode = GET_MODE (cplx);
2594 imode = GET_MODE_INNER (cmode);
2595 ibitsize = GET_MODE_BITSIZE (imode);
2597 /* If the sub-object is at least word sized, then we know that subregging
2598 will work. This special case is important, since store_bit_field
2599 wants to operate on integer modes, and there's rarely an OImode to
2600 correspond to TCmode. */
2601 if (ibitsize >= BITS_PER_WORD
2602 /* For hard regs we have exact predicates. Assume we can split
2603 the original object if it spans an even number of hard regs.
2604 This special case is important for SCmode on 64-bit platforms
2605 where the natural size of floating-point regs is 32-bit. */
2606 || (GET_CODE (cplx) == REG
2607 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2608 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2609 /* For MEMs we always try to make a "subreg", that is to adjust
2610 the MEM, because store_bit_field may generate overly
2611 convoluted RTL for sub-word fields. */
2614 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2615 imag_p ? GET_MODE_SIZE (imode) : 0);
2618 emit_move_insn (part, val);
2622 /* simplify_gen_subreg may fail for sub-word MEMs. */
2623 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2626 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
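/* Layout example (illustrative): for SCmode, IMODE is SFmode and
   IBITSIZE is 32, so the real part is written at bit offset 0 and the
   imaginary part at bit offset 32 (byte offset GET_MODE_SIZE (SFmode)
   in the subreg case), matching the rule that the real part always
   comes first in memory.  */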
2629 /* Extract one of the components of the complex value CPLX. Extract the
2630 real part if IMAG_P is false, and the imaginary part if it's true. */
2633 read_complex_part (rtx cplx, bool imag_p)
2635 enum machine_mode cmode, imode;
2638 if (GET_CODE (cplx) == CONCAT)
2639 return XEXP (cplx, imag_p);
2641 cmode = GET_MODE (cplx);
2642 imode = GET_MODE_INNER (cmode);
2643 ibitsize = GET_MODE_BITSIZE (imode);
2645 /* Special case reads from complex constants that got spilled to memory. */
2646 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2648 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2649 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2651 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2652 if (CONSTANT_CLASS_P (part))
2653 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2657 /* If the sub-object is at least word sized, then we know that subregging
2658 will work. This special case is important, since extract_bit_field
2659 wants to operate on integer modes, and there's rarely an OImode to
2660 correspond to TCmode. */
2661 if (ibitsize >= BITS_PER_WORD
2662 /* For hard regs we have exact predicates. Assume we can split
2663 the original object if it spans an even number of hard regs.
2664 This special case is important for SCmode on 64-bit platforms
2665 where the natural size of floating-point regs is 32-bit. */
2666 || (GET_CODE (cplx) == REG
2667 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2668 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2669 /* For MEMs we always try to make a "subreg", that is to adjust
2670 the MEM, because extract_bit_field may generate overly
2671 convoluted RTL for sub-word fields. */
2674 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2675 imag_p ? GET_MODE_SIZE (imode) : 0);
2679 /* simplify_gen_subreg may fail for sub-word MEMs. */
2680 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2683 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2684 true, NULL_RTX, imode, imode);
2687 /* A subroutine of emit_move_via_alt_mode. Yet another lowpart generator.
2688 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2689 represented in NEW_MODE. */
2692 emit_move_change_mode (enum machine_mode new_mode,
2693 enum machine_mode old_mode, rtx x)
2697 if (reload_in_progress && MEM_P (x))
2699 /* We can't use gen_lowpart here because it may call change_address
2700 which is not appropriate if we were called when a reload was in
2701 progress. We don't have to worry about changing the address since
2702 the size in bytes is supposed to be the same. Copy the MEM to
2703 change the mode and move any substitutions from the old MEM to
2704 the new one. */
2706 ret = adjust_address_nv (x, new_mode, 0);
2707 copy_replacements (x, ret);
2711 /* Note that we do want simplify_subreg's behaviour of validating
2712 that the new mode is ok for a hard register. If we were to use
2713 simplify_gen_subreg, we would create the subreg, but would
2714 probably run into the target not being able to implement it. */
2715 ret = simplify_subreg (new_mode, x, old_mode, 0);
2721 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2722 ALT_MODE instead of the operand's natural mode, MODE. CODE is the insn
2723 code for the move in ALT_MODE, and is known to be valid. Returns the
2724 instruction emitted, or NULL if X or Y cannot be represented in ALT_MODE. */
2727 emit_move_via_alt_mode (enum machine_mode alt_mode, enum machine_mode mode,
2728 enum insn_code code, rtx x, rtx y)
2730 x = emit_move_change_mode (alt_mode, mode, x);
2733 y = emit_move_change_mode (alt_mode, mode, y);
2736 return emit_insn (GEN_FCN (code) (x, y));
2739 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2740 an integer mode of the same size as MODE. Returns the instruction
2741 emitted, or NULL if such a move could not be generated. */
2744 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2746 enum machine_mode imode;
2747 enum insn_code code;
2749 /* There must exist a mode of the exact size we require. */
2750 imode = int_mode_for_mode (mode);
2751 if (imode == BLKmode)
2754 /* The target must support moves in this mode. */
2755 code = mov_optab->handlers[imode].insn_code;
2756 if (code == CODE_FOR_nothing)
2759 return emit_move_via_alt_mode (imode, mode, code, x, y);
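/* Example (hedged): an SFmode move on a target lacking a movsf pattern
   can go through here as SImode, since int_mode_for_mode (SFmode) is
   the same-sized SImode; emit_move_change_mode then rewrites both
   operands into that integer mode before the movsi pattern is used.  */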
2762 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2763 Return an equivalent MEM that does not use an auto-increment. */
2766 emit_move_resolve_push (enum machine_mode mode, rtx x)
2768 enum rtx_code code = GET_CODE (XEXP (x, 0));
2769 HOST_WIDE_INT adjust;
2772 adjust = GET_MODE_SIZE (mode);
2773 #ifdef PUSH_ROUNDING
2774 adjust = PUSH_ROUNDING (adjust);
2776 if (code == PRE_DEC || code == POST_DEC)
2779 /* Do not use anti_adjust_stack, since we don't want to update
2780 stack_pointer_delta. */
2781 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2782 GEN_INT (adjust), stack_pointer_rtx,
2783 0, OPTAB_LIB_WIDEN);
2784 if (temp != stack_pointer_rtx)
2785 emit_move_insn (stack_pointer_rtx, temp);
2791 temp = stack_pointer_rtx;
2794 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2797 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2803 return replace_equiv_address (x, temp);
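/* Example (illustrative): on a STACK_GROWS_DOWNWARD target, a push
   destination (mem:SI (pre_dec (reg sp))) is rewritten by the code
   above into an explicit stack-pointer decrement followed by a plain
   (mem:SI (reg sp)), which any ordinary move pattern can handle.  */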
2806 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2807 X is known to satisfy push_operand, and MODE is known to be complex.
2808 Returns the last instruction emitted. */
2811 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2813 enum machine_mode submode = GET_MODE_INNER (mode);
2816 #ifdef PUSH_ROUNDING
2817 unsigned int submodesize = GET_MODE_SIZE (submode);
2819 /* In case we output to the stack, but the size is smaller than the
2820 machine can push exactly, we need to use move instructions. */
2821 if (PUSH_ROUNDING (submodesize) != submodesize)
2823 x = emit_move_resolve_push (mode, x);
2824 return emit_move_insn (x, y);
2828 /* Note that the real part always precedes the imag part in memory
2829 regardless of machine's endianness. */
2830 switch (GET_CODE (XEXP (x, 0)))
2844 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2845 read_complex_part (y, imag_first));
2846 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2847 read_complex_part (y, !imag_first));
2850 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2851 MODE is known to be complex. Returns the last instruction emitted. */
2854 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2858 /* Need to take special care for pushes, to maintain proper ordering
2859 of the data, and possibly extra padding. */
2860 if (push_operand (x, mode))
2861 return emit_move_complex_push (mode, x, y);
2863 /* For memory to memory moves, optimal behavior can be had with the
2864 existing block move logic. */
2865 if (MEM_P (x) && MEM_P (y))
2867 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2868 BLOCK_OP_NO_LIBCALL);
2869 return get_last_insn ();
2872 /* See if we can coerce the target into moving both values at once. */
2874 /* Not possible if the values are inherently not adjacent. */
2875 if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2877 /* Is possible if both are registers (or subregs of registers). */
2878 else if (register_operand (x, mode) && register_operand (y, mode))
2880 /* If one of the operands is a memory, and alignment constraints
2881 are friendly enough, we may be able to do combined memory operations.
2882 We do not attempt this if Y is a constant because that combination is
2883 usually better with the by-parts thing below. */
2884 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2885 && (!STRICT_ALIGNMENT
2886 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2893 rtx ret = emit_move_via_integer (mode, x, y);
2898 /* Show the output dies here. This is necessary for SUBREGs
2899 of pseudos since we cannot track their lifetimes correctly;
2900 hard regs shouldn't appear here except as return values. */
2901 if (!reload_completed && !reload_in_progress
2902 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2903 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2905 write_complex_part (x, read_complex_part (y, false), false);
2906 write_complex_part (x, read_complex_part (y, true), true);
2907 return get_last_insn ();
2910 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2911 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2914 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2918 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
2921 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
2922 if (code != CODE_FOR_nothing)
2923 return emit_move_via_alt_mode (CCmode, mode, code, x, y);
2926 /* Otherwise, find the MODE_INT mode of the same width. */
2927 ret = emit_move_via_integer (mode, x, y);
2928 gcc_assert (ret != NULL);
2932 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2933 MODE is any multi-word or full-word mode that lacks a move_insn
2934 pattern. Note that you will get better code if you define such
2935 patterns, even if they must turn into multiple assembler instructions. */
2938 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
2945 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2947 /* If X is a push on the stack, do the push now and replace
2948 X with a reference to the stack pointer. */
2949 if (push_operand (x, mode))
2950 x = emit_move_resolve_push (mode, x);
2952 /* If we are in reload, see if either operand is a MEM whose address
2953 is scheduled for replacement. */
2954 if (reload_in_progress && MEM_P (x)
2955 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2956 x = replace_equiv_address_nv (x, inner);
2957 if (reload_in_progress && MEM_P (y)
2958 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2959 y = replace_equiv_address_nv (y, inner);
2963 need_clobber = false;
2965 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2968 rtx xpart = operand_subword (x, i, 1, mode);
2969 rtx ypart = operand_subword (y, i, 1, mode);
2971 /* If we can't get a part of Y, put Y into memory if it is a
2972 constant. Otherwise, force it into a register. If we still
2973 can't get a part of Y, abort. */
2974 if (ypart == 0 && CONSTANT_P (y))
2976 y = force_const_mem (mode, y);
2977 ypart = operand_subword (y, i, 1, mode);
2979 else if (ypart == 0)
2980 ypart = operand_subword_force (y, i, mode);
2982 gcc_assert (xpart && ypart);
2984 need_clobber |= (GET_CODE (xpart) == SUBREG);
2986 last_insn = emit_move_insn (xpart, ypart);
2992 /* Show the output dies here. This is necessary for SUBREGs
2993 of pseudos since we cannot track their lifetimes correctly;
2994 hard regs shouldn't appear here except as return values.
2995 We never want to emit such a clobber after reload. */
2997 && ! (reload_in_progress || reload_completed)
2998 && need_clobber != 0)
2999 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3006 /* Low level part of emit_move_insn.
3007 Called just like emit_move_insn, but assumes X and Y
3008 are basically valid. */
3011 emit_move_insn_1 (rtx x, rtx y)
3013 enum machine_mode mode = GET_MODE (x);
3014 enum insn_code code;
3016 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3018 code = mov_optab->handlers[mode].insn_code;
3019 if (code != CODE_FOR_nothing)
3020 return emit_insn (GEN_FCN (code) (x, y));
3022 /* Expand complex moves by moving real part and imag part. */
3023 if (COMPLEX_MODE_P (mode))
3024 return emit_move_complex (mode, x, y);
3026 if (GET_MODE_CLASS (mode) == MODE_CC)
3027 return emit_move_ccmode (mode, x, y);
3029 /* Try using a move pattern for the corresponding integer mode. This is
3030 only safe when simplify_subreg can convert MODE constants into integer
3031 constants. At present, it can only do this reliably if the value
3032 fits within a HOST_WIDE_INT. */
3033 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3035 rtx ret = emit_move_via_integer (mode, x, y);
3040 return emit_move_multi_word (mode, x, y);
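/* Dispatch summary: a native mov pattern wins outright; complex modes
   are split into real and imaginary parts; MODE_CC goes through
   emit_move_ccmode; other modes try a same-sized integer move and
   finally fall back to the word-by-word emit_move_multi_word path.  */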
3043 /* Generate code to copy Y into X.
3044 Both Y and X must have the same mode, except that
3045 Y can be a constant with VOIDmode.
3046 This mode cannot be BLKmode; use emit_block_move for that.
3048 Return the last instruction emitted. */
3051 emit_move_insn (rtx x, rtx y)
3053 enum machine_mode mode = GET_MODE (x);
3054 rtx y_cst = NULL_RTX;
3057 gcc_assert (mode != BLKmode
3058 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3063 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3064 && (last_insn = compress_float_constant (x, y)))
3069 if (!LEGITIMATE_CONSTANT_P (y))
3071 y = force_const_mem (mode, y);
3073 /* If the target's cannot_force_const_mem prevented the spill,
3074 assume that the target's move expanders will also take care
3075 of the non-legitimate constant. */
3081 /* If X or Y are memory references, verify that their addresses are valid
3082 for the machine. */
3084 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3085 && ! push_operand (x, GET_MODE (x)))
3087 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3088 x = validize_mem (x);
3091 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3093 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3094 y = validize_mem (y);
3096 gcc_assert (mode != BLKmode);
3098 last_insn = emit_move_insn_1 (x, y);
3100 if (y_cst && REG_P (x)
3101 && (set = single_set (last_insn)) != NULL_RTX
3102 && SET_DEST (set) == x
3103 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3104 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3109 /* If Y is representable exactly in a narrower mode, and the target can
3110 perform the extension directly from constant or memory, then emit the
3111 move as an extension. */
3114 compress_float_constant (rtx x, rtx y)
3116 enum machine_mode dstmode = GET_MODE (x);
3117 enum machine_mode orig_srcmode = GET_MODE (y);
3118 enum machine_mode srcmode;
3121 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3123 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3124 srcmode != orig_srcmode;
3125 srcmode = GET_MODE_WIDER_MODE (srcmode))
3128 rtx trunc_y, last_insn;
3130 /* Skip if the target can't extend this way. */
3131 ic = can_extend_p (dstmode, srcmode, 0);
3132 if (ic == CODE_FOR_nothing)
3135 /* Skip if the narrowed value isn't exact. */
3136 if (! exact_real_truncate (srcmode, &r))
3139 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3141 if (LEGITIMATE_CONSTANT_P (trunc_y))
3143 /* Skip if the target needs extra instructions to perform
3144 the extension. */
3145 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3148 else if (float_extend_from_mem[dstmode][srcmode])
3149 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3153 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3154 last_insn = get_last_insn ();
3157 set_unique_reg_note (last_insn, REG_EQUAL, y);
3165 /* Pushing data onto the stack. */
3167 /* Push a block of length SIZE (perhaps variable)
3168 and return an rtx to address the beginning of the block.
3169 The value may be virtual_outgoing_args_rtx.
3171 EXTRA is the number of bytes of padding to push in addition to SIZE.
3172 BELOW nonzero means this padding comes at low addresses;
3173 otherwise, the padding comes at high addresses. */
3176 push_block (rtx size, int extra, int below)
3180 size = convert_modes (Pmode, ptr_mode, size, 1);
3181 if (CONSTANT_P (size))
3182 anti_adjust_stack (plus_constant (size, extra));
3183 else if (REG_P (size) && extra == 0)
3184 anti_adjust_stack (size);
3187 temp = copy_to_mode_reg (Pmode, size);
3189 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3190 temp, 0, OPTAB_LIB_WIDEN);
3191 anti_adjust_stack (temp);
3194 #ifndef STACK_GROWS_DOWNWARD
3200 temp = virtual_outgoing_args_rtx;
3201 if (extra != 0 && below)
3202 temp = plus_constant (temp, extra);
3206 if (GET_CODE (size) == CONST_INT)
3207 temp = plus_constant (virtual_outgoing_args_rtx,
3208 -INTVAL (size) - (below ? 0 : extra));
3209 else if (extra != 0 && !below)
3210 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3211 negate_rtx (Pmode, plus_constant (size, extra)));
3213 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3214 negate_rtx (Pmode, size));
3217 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
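/* Worked example (hedged): pushing a 16-byte block with EXTRA == 0 on
   a downward-growing stack emits anti_adjust_stack (GEN_INT (16)) and
   returns an address equivalent to virtual_outgoing_args_rtx - 16,
   i.e. the lowest byte of the freshly allocated block.  */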
3220 #ifdef PUSH_ROUNDING
3222 /* Emit single push insn. */
3225 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3228 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3230 enum insn_code icode;
3231 insn_operand_predicate_fn pred;
3233 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3234 /* If there is a push pattern, use it. Otherwise try the old way of
3235 throwing a MEM representing the push operation to the move expander. */
3236 icode = push_optab->handlers[(int) mode].insn_code;
3237 if (icode != CODE_FOR_nothing)
3239 if (((pred = insn_data[(int) icode].operand[0].predicate)
3240 && !((*pred) (x, mode))))
3241 x = force_reg (mode, x);
3242 emit_insn (GEN_FCN (icode) (x));
3245 if (GET_MODE_SIZE (mode) == rounded_size)
3246 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3247 /* If we are to pad downward, adjust the stack pointer first and
3248 then store X into the stack location using an offset. This is
3249 because emit_move_insn does not know how to pad; it does not have
3250 access to type. */
3251 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3253 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3254 HOST_WIDE_INT offset;
3256 emit_move_insn (stack_pointer_rtx,
3257 expand_binop (Pmode,
3258 #ifdef STACK_GROWS_DOWNWARD
3264 GEN_INT (rounded_size),
3265 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3267 offset = (HOST_WIDE_INT) padding_size;
3268 #ifdef STACK_GROWS_DOWNWARD
3269 if (STACK_PUSH_CODE == POST_DEC)
3270 /* We have already decremented the stack pointer, so get the
3271 previous value. */
3272 offset += (HOST_WIDE_INT) rounded_size;
3274 if (STACK_PUSH_CODE == POST_INC)
3275 /* We have already incremented the stack pointer, so get the
3276 previous value. */
3277 offset -= (HOST_WIDE_INT) rounded_size;
3279 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3283 #ifdef STACK_GROWS_DOWNWARD
3284 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3285 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3286 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3288 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3289 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3290 GEN_INT (rounded_size));
3292 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3295 dest = gen_rtx_MEM (mode, dest_addr);
3299 set_mem_attributes (dest, type, 1);
3301 if (flag_optimize_sibling_calls)
3302 /* Function incoming arguments may overlap with sibling call
3303 outgoing arguments and we cannot allow reordering of reads
3304 from function arguments with stores to outgoing arguments
3305 of sibling calls. */
3306 set_mem_alias_set (dest, 0);
3308 emit_move_insn (dest, x);
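/* Example (illustrative): with STACK_PUSH_CODE == PRE_DEC and a value
   whose size is already PUSH_ROUNDING-exact, DEST_ADDR is simply
   (pre_dec (reg sp)), so the emit_move_insn above produces the
   canonical push insn

       (set (mem:SI (pre_dec (reg sp))) x)  */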
3312 /* Generate code to push X onto the stack, assuming it has mode MODE and
3313 type TYPE.
3314 MODE is redundant except when X is a CONST_INT (since they don't
3315 carry mode info).
3316 SIZE is an rtx for the size of data to be copied (in bytes),
3317 needed only if X is BLKmode.
3319 ALIGN (in bits) is maximum alignment we can assume.
3321 If PARTIAL and REG are both nonzero, then copy that many of the first
3322 words of X into registers starting with REG, and push the rest of X.
3323 The amount of space pushed is decreased by PARTIAL words,
3324 rounded *down* to a multiple of PARM_BOUNDARY.
3325 REG must be a hard register in this case.
3326 If REG is zero but PARTIAL is not, take all other actions for an
3327 argument partially in registers, but do not actually load any
3328 registers.
3330 EXTRA is the amount in bytes of extra space to leave next to this arg.
3331 This is ignored if an argument block has already been allocated.
3333 On a machine that lacks real push insns, ARGS_ADDR is the address of
3334 the bottom of the argument block for this call. We use indexing off there
3335 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3336 argument block has not been preallocated.
3338 ARGS_SO_FAR is the size of args previously pushed for this call.
3340 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3341 for arguments passed in registers. If nonzero, it will be the number
3342 of bytes required. */
3345 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3346 unsigned int align, int partial, rtx reg, int extra,
3347 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3351 enum direction stack_direction
3352 #ifdef STACK_GROWS_DOWNWARD
3358 /* Decide where to pad the argument: `downward' for below,
3359 `upward' for above, or `none' for don't pad it.
3360 Default is below for small data on big-endian machines; else above. */
3361 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3363 /* Invert direction if stack is post-decrement. */
3365 if (STACK_PUSH_CODE == POST_DEC)
3366 if (where_pad != none)
3367 where_pad = (where_pad == downward ? upward : downward);
3371 if (mode == BLKmode)
3373 /* Copy a block into the stack, entirely or partially. */
3376 int used = partial * UNITS_PER_WORD;
3380 if (reg && GET_CODE (reg) == PARALLEL)
3382 /* Use the size of the elt to compute offset. */
3383 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3384 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3385 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3388 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3394 /* USED is now the # of bytes we need not copy to the stack
3395 because registers will take care of them. */
3398 xinner = adjust_address (xinner, BLKmode, used);
3400 /* If the partial register-part of the arg counts in its stack size,
3401 skip the part of stack space corresponding to the registers.
3402 Otherwise, start copying to the beginning of the stack space,
3403 by setting SKIP to 0. */
3404 skip = (reg_parm_stack_space == 0) ? 0 : used;
3406 #ifdef PUSH_ROUNDING
3407 /* Do it with several push insns if that doesn't take lots of insns
3408 and if there is no difficulty with push insns that skip bytes
3409 on the stack for alignment purposes. */
3412 && GET_CODE (size) == CONST_INT
3414 && MEM_ALIGN (xinner) >= align
3415 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3416 /* Here we avoid the case of a structure whose weak alignment
3417 forces many pushes of a small amount of data,
3418 and such small pushes do rounding that causes trouble. */
3419 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3420 || align >= BIGGEST_ALIGNMENT
3421 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3422 == (align / BITS_PER_UNIT)))
3423 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3425 /* Push padding now if padding above and stack grows down,
3426 or if padding below and stack grows up.
3427 But if space already allocated, this has already been done. */
3428 if (extra && args_addr == 0
3429 && where_pad != none && where_pad != stack_direction)
3430 anti_adjust_stack (GEN_INT (extra));
3432 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3435 #endif /* PUSH_ROUNDING */
3439 /* Otherwise make space on the stack and copy the data
3440 to the address of that space. */
3442 /* Deduct words put into registers from the size we must copy. */
3445 if (GET_CODE (size) == CONST_INT)
3446 size = GEN_INT (INTVAL (size) - used);
3448 size = expand_binop (GET_MODE (size), sub_optab, size,
3449 GEN_INT (used), NULL_RTX, 0,
3453 /* Get the address of the stack space.
3454 In this case, we do not deal with EXTRA separately.
3455 A single stack adjust will do. */
3458 temp = push_block (size, extra, where_pad == downward);
3461 else if (GET_CODE (args_so_far) == CONST_INT)
3462 temp = memory_address (BLKmode,
3463 plus_constant (args_addr,
3464 skip + INTVAL (args_so_far)));
3466 temp = memory_address (BLKmode,
3467 plus_constant (gen_rtx_PLUS (Pmode,
3472 if (!ACCUMULATE_OUTGOING_ARGS)
3474 /* If the source is referenced relative to the stack pointer,
3475 copy it to another register to stabilize it. We do not need
3476 to do this if we know that we won't be changing sp. */
3478 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3479 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3480 temp = copy_to_reg (temp);
3483 target = gen_rtx_MEM (BLKmode, temp);
3485 /* We do *not* set_mem_attributes here, because incoming arguments
3486 may overlap with sibling call outgoing arguments and we cannot
3487 allow reordering of reads from function arguments with stores
3488 to outgoing arguments of sibling calls. We do, however, want
3489 to record the alignment of the stack slot. */
3490 /* ALIGN may well be better aligned than TYPE, e.g. due to
3491 PARM_BOUNDARY. Assume the caller isn't lying. */
3492 set_mem_align (target, align);
3494 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3497 else if (partial > 0)
3499 /* Scalar partly in registers. */
3501 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3504 /* # words of start of argument
3505 that we must make space for but need not store. */
3506 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3507 int args_offset = INTVAL (args_so_far);
3510 /* Push padding now if padding above and stack grows down,
3511 or if padding below and stack grows up.
3512 But if space already allocated, this has already been done. */
3513 if (extra && args_addr == 0
3514 && where_pad != none && where_pad != stack_direction)
3515 anti_adjust_stack (GEN_INT (extra));
3517 /* If we make space by pushing it, we might as well push
3518 the real data. Otherwise, we can leave OFFSET nonzero
3519 and leave the space uninitialized. */
3523 /* Now NOT_STACK gets the number of words that we don't need to
3524 allocate on the stack. */
3525 not_stack = partial - offset;
3527 /* If the partial register-part of the arg counts in its stack size,
3528 skip the part of stack space corresponding to the registers.
3529 Otherwise, start copying to the beginning of the stack space,
3530 by setting SKIP to 0. */
3531 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3533 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3534 x = validize_mem (force_const_mem (mode, x));
3536 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3537 SUBREGs of such registers are not allowed. */
3538 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3539 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3540 x = copy_to_reg (x);
3542 /* Loop over all the words allocated on the stack for this arg. */
3543 /* We can do it by words, because any scalar bigger than a word
3544 has a size a multiple of a word. */
3545 #ifndef PUSH_ARGS_REVERSED
3546 for (i = not_stack; i < size; i++)
3548 for (i = size - 1; i >= not_stack; i--)
3550 if (i >= not_stack + offset)
3551 emit_push_insn (operand_subword_force (x, i, mode),
3552 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3554 GEN_INT (args_offset + ((i - not_stack + skip)
3556 reg_parm_stack_space, alignment_pad);
3563 /* Push padding now if padding above and stack grows down,
3564 or if padding below and stack grows up.
3565 But if space already allocated, this has already been done. */
3566 if (extra && args_addr == 0
3567 && where_pad != none && where_pad != stack_direction)
3568 anti_adjust_stack (GEN_INT (extra));
3570 #ifdef PUSH_ROUNDING
3571 if (args_addr == 0 && PUSH_ARGS)
3572 emit_single_push_insn (mode, x, type);
3576 if (GET_CODE (args_so_far) == CONST_INT)
3578 = memory_address (mode,
3579 plus_constant (args_addr,
3580 INTVAL (args_so_far)));
3582 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3584 dest = gen_rtx_MEM (mode, addr);
3586 /* We do *not* set_mem_attributes here, because incoming arguments
3587 may overlap with sibling call outgoing arguments and we cannot
3588 allow reordering of reads from function arguments with stores
3589 to outgoing arguments of sibling calls. We do, however, want
3590 to record the alignment of the stack slot. */
3591 /* ALIGN may well be better aligned than TYPE, e.g. due to
3592 PARM_BOUNDARY. Assume the caller isn't lying. */
3593 set_mem_align (dest, align);
3595 emit_move_insn (dest, x);
3599 /* If part should go in registers, copy that part
3600 into the appropriate registers. Do this now, at the end,
3601 since mem-to-mem copies above may do function calls. */
3602 if (partial > 0 && reg != 0)
3604 /* Handle calls that pass values in multiple non-contiguous locations.
3605 The Irix 6 ABI has examples of this. */
3606 if (GET_CODE (reg) == PARALLEL)
3607 emit_group_load (reg, x, type, -1);
3609 move_block_to_reg (REGNO (reg), x, partial, mode);
3612 if (extra && args_addr == 0 && where_pad == stack_direction)
3613 anti_adjust_stack (GEN_INT (extra));
3615 if (alignment_pad && args_addr == 0)
3616 anti_adjust_stack (alignment_pad);
3619 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3620 operations. */
3623 get_subtarget (rtx x)
3627 /* Only registers can be subtargets. */
3629 /* Don't use hard regs to avoid extending their life. */
3630 || REGNO (x) < FIRST_PSEUDO_REGISTER
3634 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3635 FIELD is a bitfield. Returns true if the optimization was successful,
3636 and there's nothing else to do. */
3639 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3640 unsigned HOST_WIDE_INT bitpos,
3641 enum machine_mode mode1, rtx str_rtx,
3644 enum machine_mode str_mode = GET_MODE (str_rtx);
3645 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3650 if (mode1 != VOIDmode
3651 || bitsize >= BITS_PER_WORD
3652 || str_bitsize > BITS_PER_WORD
3653 || TREE_SIDE_EFFECTS (to)
3654 || TREE_THIS_VOLATILE (to))
3658 if (!BINARY_CLASS_P (src)
3659 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3662 op0 = TREE_OPERAND (src, 0);
3663 op1 = TREE_OPERAND (src, 1);
3666 if (!operand_equal_p (to, op0, 0))
3669 if (MEM_P (str_rtx))
3671 unsigned HOST_WIDE_INT offset1;
3673 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3674 str_mode = word_mode;
3675 str_mode = get_best_mode (bitsize, bitpos,
3676 MEM_ALIGN (str_rtx), str_mode, 0);
3677 if (str_mode == VOIDmode)
3679 str_bitsize = GET_MODE_BITSIZE (str_mode);
3682 bitpos %= str_bitsize;
3683 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3684 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3686 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3689 /* If the bit field covers the whole REG/MEM, store_field
3690 will likely generate better code. */
3691 if (bitsize >= str_bitsize)
3694 /* We can't handle fields split across multiple entities. */
3695 if (bitpos + bitsize > str_bitsize)
3698 if (BYTES_BIG_ENDIAN)
3699 bitpos = str_bitsize - bitpos - bitsize;
3701 switch (TREE_CODE (src))
3705 /* For now, just optimize the case of the topmost bitfield
3706 where we don't need to do any masking and also
3707 1 bit bitfields where xor can be used.
3708 We might win by one instruction for the other bitfields
3709 too if insv/extv instructions aren't used, so that
3710 can be added later. */
3711 if (bitpos + bitsize != str_bitsize
3712 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3715 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3716 value = convert_modes (str_mode,
3717 TYPE_MODE (TREE_TYPE (op1)), value,
3718 TYPE_UNSIGNED (TREE_TYPE (op1)));
3720 /* We may be accessing data outside the field, which means
3721 we can alias adjacent data. */
3722 if (MEM_P (str_rtx))
3724 str_rtx = shallow_copy_rtx (str_rtx);
3725 set_mem_alias_set (str_rtx, 0);
3726 set_mem_expr (str_rtx, 0);
3729 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3730 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3732 value = expand_and (str_mode, value, const1_rtx, NULL);
3735 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3736 build_int_cst (NULL_TREE, bitpos),
3738 result = expand_binop (str_mode, binop, str_rtx,
3739 value, str_rtx, 1, OPTAB_WIDEN);
3740 if (result != str_rtx)
3741 emit_move_insn (str_rtx, result);
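/* Example (hedged): for "s.f += 1" where F is the topmost bitfield of
   its word, no masking is needed; VALUE is shifted left by BITPOS and
   added directly into STR_RTX.  For a 1-bit field elsewhere in the
   word, VALUE is first masked down to a single bit, because adding or
   subtracting 1 in a 1-bit field just flips it, so the full source
   switches BINOP to an XOR for that case.  */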
3752 /* Expand an assignment that stores the value of FROM into TO. */
3755 expand_assignment (tree to, tree from)
3760 /* Don't crash if the lhs of the assignment was erroneous. */
3762 if (TREE_CODE (to) == ERROR_MARK)
3764 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3768 /* Assignment of a structure component needs special treatment
3769 if the structure component's rtx is not simply a MEM.
3770 Assignment of an array element at a constant index, and assignment of
3771 an array element in an unaligned packed structure field, has the same
3772 problem. */
3773 if (handled_component_p (to)
3774 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3776 enum machine_mode mode1;
3777 HOST_WIDE_INT bitsize, bitpos;
3785 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3786 &unsignedp, &volatilep);
3788 /* If we are going to use store_bit_field and extract_bit_field,
3789 make sure to_rtx will be safe for multiple use. */
3791 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3795 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3797 gcc_assert (MEM_P (to_rtx));
3799 #ifdef POINTERS_EXTEND_UNSIGNED
3800 if (GET_MODE (offset_rtx) != Pmode)
3801 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3803 if (GET_MODE (offset_rtx) != ptr_mode)
3804 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3807 /* A constant address in TO_RTX can have VOIDmode; we must not try
3808 to call force_reg for that case, so avoid it. */
3810 && GET_MODE (to_rtx) == BLKmode
3811 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3813 && (bitpos % bitsize) == 0
3814 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3815 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3817 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3821 to_rtx = offset_address (to_rtx, offset_rtx,
3822 highest_pow2_factor_for_target (to,
3826 /* Handle expand_expr of a complex value returning a CONCAT. */
3827 if (GET_CODE (to_rtx) == CONCAT)
3829 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3831 gcc_assert (bitpos == 0);
3832 result = store_expr (from, to_rtx, false);
3836 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3837 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3844 /* If the field is at offset zero, we could have been given the
3845 DECL_RTX of the parent struct. Don't munge it. */
3846 to_rtx = shallow_copy_rtx (to_rtx);
3848 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3850 /* Deal with volatile and readonly fields. The former is only
3851 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3853 MEM_VOLATILE_P (to_rtx) = 1;
3854 if (component_uses_parent_alias_set (to))
3855 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3858 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3862 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3863 TREE_TYPE (tem), get_alias_set (to));
3867 preserve_temp_slots (result);
3873 /* If the rhs is a function call and its value is not an aggregate,
3874 call the function before we start to compute the lhs.
3875 This is needed for correct code for cases such as
3876 val = setjmp (buf) on machines where reference to val
3877 requires loading up part of an address in a separate insn.
3879 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3880 since it might be a promoted variable where the zero- or sign- extension
3881 needs to be done. Handling this in the normal way is safe because no
3882 computation is done before the call. */
3883 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3884 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3885 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3886 && REG_P (DECL_RTL (to))))
3891 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3893 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3895 /* Handle calls that return values in multiple non-contiguous locations.
3896 The Irix 6 ABI has examples of this. */
3897 if (GET_CODE (to_rtx) == PARALLEL)
3898 emit_group_load (to_rtx, value, TREE_TYPE (from),
3899 int_size_in_bytes (TREE_TYPE (from)));
3900 else if (GET_MODE (to_rtx) == BLKmode)
3901 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3904 if (POINTER_TYPE_P (TREE_TYPE (to)))
3905 value = convert_memory_address (GET_MODE (to_rtx), value);
3906 emit_move_insn (to_rtx, value);
3908 preserve_temp_slots (to_rtx);
3914 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3915 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3918 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3920 /* Don't move directly into a return register. */
3921 if (TREE_CODE (to) == RESULT_DECL
3922 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3927 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3929 if (GET_CODE (to_rtx) == PARALLEL)
3930 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3931 int_size_in_bytes (TREE_TYPE (from)));
3933 emit_move_insn (to_rtx, temp);
3935 preserve_temp_slots (to_rtx);
3941 /* In case we are returning the contents of an object which overlaps
3942 the place the value is being stored, use a safe function when copying
3943 a value through a pointer into a structure value return block. */
3944 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3945 && current_function_returns_struct
3946 && !current_function_returns_pcc_struct)
3951 size = expr_size (from);
3952 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3954 emit_library_call (memmove_libfunc, LCT_NORMAL,
3955 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3956 XEXP (from_rtx, 0), Pmode,
3957 convert_to_mode (TYPE_MODE (sizetype),
3958 size, TYPE_UNSIGNED (sizetype)),
3959 TYPE_MODE (sizetype));
3961 preserve_temp_slots (to_rtx);
3967 /* Compute FROM and store the value in the rtx we got. */
3970 result = store_expr (from, to_rtx, 0);
3971 preserve_temp_slots (result);
3977 /* Generate code for computing expression EXP,
3978 and storing the value into TARGET.
3980 If the mode is BLKmode then we may return TARGET itself.
3981 It turns out that in BLKmode it doesn't cause a problem,
3982 because C has no operators that could combine two different
3983 assignments into the same BLKmode object with different values
3984 with no sequence point. Will other languages need this to
3985 be more thorough?
3987 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3988 stack, and block moves may need to be treated specially. */
3991 store_expr (tree exp, rtx target, int call_param_p)
3994 rtx alt_rtl = NULL_RTX;
3995 int dont_return_target = 0;
3997 if (VOID_TYPE_P (TREE_TYPE (exp)))
3999 /* C++ can generate ?: expressions with a throw expression in one
4000 branch and an rvalue in the other. Here, we resolve attempts to
4001 store the throw expression's nonexistent result. */
4002 gcc_assert (!call_param_p);
4003 expand_expr (exp, const0_rtx, VOIDmode, 0);
4006 if (TREE_CODE (exp) == COMPOUND_EXPR)
4008 /* Perform first part of compound expression, then assign from second
4009 part. */
4010 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4011 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4012 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4014 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4016 /* For conditional expression, get safe form of the target. Then
4017 test the condition, doing the appropriate assignment on either
4018 side. This avoids the creation of unnecessary temporaries.
4019 For non-BLKmode, it is more efficient not to do this. */
4021 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4023 do_pending_stack_adjust ();
4025 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4026 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4027 emit_jump_insn (gen_jump (lab2));
4030 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4036 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4037 /* If this is a scalar in a register that is stored in a wider mode
4038 than the declared mode, compute the result into its declared mode
4039 and then convert to the wider mode. Our value is the computed
4040 expression. */
4042 rtx inner_target = 0;
4044 /* We can do the conversion inside EXP, which will often result
4045 in some optimizations. Do the conversion in two steps: first
4046 change the signedness, if needed, then the extend. But don't
4047 do this if the type of EXP is a subtype of something else
4048 since then the conversion might involve more than just
4049 converting modes. */
4050 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4051 && TREE_TYPE (TREE_TYPE (exp)) == 0
4052 && (!lang_hooks.reduce_bit_field_operations
4053 || (GET_MODE_PRECISION (GET_MODE (target))
4054 == TYPE_PRECISION (TREE_TYPE (exp)))))
4056 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4057 != SUBREG_PROMOTED_UNSIGNED_P (target))
4059 (lang_hooks.types.signed_or_unsigned_type
4060 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4062 exp = convert (lang_hooks.types.type_for_mode
4063 (GET_MODE (SUBREG_REG (target)),
4064 SUBREG_PROMOTED_UNSIGNED_P (target)),
4067 inner_target = SUBREG_REG (target);
4070 temp = expand_expr (exp, inner_target, VOIDmode,
4071 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4073 /* If TEMP is a VOIDmode constant, use convert_modes to make
4074 sure that we properly convert it. */
4075 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4077 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4078 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4079 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4080 GET_MODE (target), temp,
4081 SUBREG_PROMOTED_UNSIGNED_P (target));
4084 convert_move (SUBREG_REG (target), temp,
4085 SUBREG_PROMOTED_UNSIGNED_P (target));
4091 temp = expand_expr_real (exp, target, GET_MODE (target),
4093 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4095 /* Return TARGET if it's a specified hardware register.
4096 If TARGET is a volatile mem ref, either return TARGET
4097 or return a reg copied *from* TARGET; ANSI requires this.
4099 Otherwise, if TEMP is not TARGET, return TEMP
4100 if it is constant (for efficiency),
4101 or if we really want the correct value. */
4102 if (!(target && REG_P (target)
4103 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4104 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4105 && ! rtx_equal_p (temp, target)
4106 && CONSTANT_P (temp))
4107 dont_return_target = 1;
4110 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4111 the same as that of TARGET, adjust the constant. This is needed, for
4112 example, in case it is a CONST_DOUBLE and we want only a word-sized
4114 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4115 && TREE_CODE (exp) != ERROR_MARK
4116 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4117 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4118 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4120 /* If value was not generated in the target, store it there.
4121 Convert the value to TARGET's type first if necessary and emit the
4122 pending incrementations that have been queued when expanding EXP.
4123 Note that we cannot emit the whole queue blindly because this will
4124 effectively disable the POST_INC optimization later.
4126 If TEMP and TARGET compare equal according to rtx_equal_p, but
4127 one or both of them are volatile memory refs, we have to distinguish
4129 - expand_expr has used TARGET. In this case, we must not generate
4130 another copy. This can be detected by TARGET being equal according
4132 - expand_expr has not used TARGET - that means that the source just
4133 happens to have the same RTX form. Since temp will have been created
4134 by expand_expr, it will compare unequal according to ==.
4135 We must generate a copy in this case, to reach the correct number
4136 of volatile memory references. */
4138 if ((! rtx_equal_p (temp, target)
4139 || (temp != target && (side_effects_p (temp)
4140 || side_effects_p (target))))
4141 && TREE_CODE (exp) != ERROR_MARK
4142 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4143 but TARGET is not a valid memory reference, TEMP will differ
4144 from TARGET although it is really the same location. */
4145 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4146 /* If there's nothing to copy, don't bother. Don't call expr_size
4147 unless necessary, because for some front ends (e.g. C++) the
4148 expr_size hook aborts on objects that are not supposed to be bit-copied or
4150 && expr_size (exp) != const0_rtx)
4152 if (GET_MODE (temp) != GET_MODE (target)
4153 && GET_MODE (temp) != VOIDmode)
4155 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4156 if (dont_return_target)
4158 /* In this case, we will return TEMP,
4159 so make sure it has the proper mode.
4160 But don't forget to store the value into TARGET. */
4161 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4162 emit_move_insn (target, temp);
4165 convert_move (target, temp, unsignedp);
4168 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4170 /* Handle copying a string constant into an array. The string
4171 constant may be shorter than the array. So copy just the string's
4172 actual length, and clear the rest. First get the size of the data
4173 type of the string, which is actually the size of the target. */
4174 rtx size = expr_size (exp);
4176 if (GET_CODE (size) == CONST_INT
4177 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4178 emit_block_move (target, temp, size,
4180 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4183 /* Compute the size of the data to copy from the string. */
4185 = size_binop (MIN_EXPR,
4186 make_tree (sizetype, size),
4187 size_int (TREE_STRING_LENGTH (exp)));
4189 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4191 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4194 /* Copy that much. */
4195 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4196 TYPE_UNSIGNED (sizetype));
4197 emit_block_move (target, temp, copy_size_rtx,
4199 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4201 /* Figure out how much is left in TARGET that we have to clear.
4202 Do all calculations in ptr_mode. */
4203 if (GET_CODE (copy_size_rtx) == CONST_INT)
4205 size = plus_constant (size, -INTVAL (copy_size_rtx));
4206 target = adjust_address (target, BLKmode,
4207 INTVAL (copy_size_rtx));
4211 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4212 copy_size_rtx, NULL_RTX, 0,
4215 #ifdef POINTERS_EXTEND_UNSIGNED
4216 if (GET_MODE (copy_size_rtx) != Pmode)
4217 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4218 TYPE_UNSIGNED (sizetype));
4221 target = offset_address (target, copy_size_rtx,
4222 highest_pow2_factor (copy_size));
4223 label = gen_label_rtx ();
4224 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4225 GET_MODE (size), 0, label);
4228 if (size != const0_rtx)
4229 clear_storage (target, size);
4235 /* Handle calls that return values in multiple non-contiguous locations.
4236 The Irix 6 ABI has examples of this. */
4237 else if (GET_CODE (target) == PARALLEL)
4238 emit_group_load (target, temp, TREE_TYPE (exp),
4239 int_size_in_bytes (TREE_TYPE (exp)));
4240 else if (GET_MODE (temp) == BLKmode)
4241 emit_block_move (target, temp, expr_size (exp),
4243 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4246 temp = force_operand (temp, target);
4248 emit_move_insn (target, temp);
4255 /* Examine CTOR to discover:
4256 * how many scalar fields are set to nonzero values,
4257 and place it in *P_NZ_ELTS;
4258 * how many scalar fields are set to non-constant values,
4259 and place it in *P_NC_ELTS; and
4260 * how many scalar fields in total are in CTOR,
4261 and place it in *P_ELT_COUNT. */
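/* Illustrative example (not from the original sources): for roughly

       struct s { int a, b, c, d; } x = { 1, 0, 0, n };

   the CONSTRUCTOR has four scalar fields, so *P_ELT_COUNT gets 4; the
   fields initialized to 1 and to the variable N are not known to be
   zero, so *P_NZ_ELTS gets 2; and only N is non-constant, so
   *P_NC_ELTS gets 1.  The exact CONSTRUCTOR shape depends on the
   front end.  */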
4264 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4265 HOST_WIDE_INT *p_nc_elts,
4266 HOST_WIDE_INT *p_elt_count)
4268 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4275 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4277 tree value = TREE_VALUE (list);
4278 tree purpose = TREE_PURPOSE (list);
4282 if (TREE_CODE (purpose) == RANGE_EXPR)
4284 tree lo_index = TREE_OPERAND (purpose, 0);
4285 tree hi_index = TREE_OPERAND (purpose, 1);
4287 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4288 mult = (tree_low_cst (hi_index, 1)
4289 - tree_low_cst (lo_index, 1) + 1);
4292 switch (TREE_CODE (value))
4296 HOST_WIDE_INT nz = 0, nc = 0, count = 0;
4297 categorize_ctor_elements_1 (value, &nz, &nc, &count);
4298 nz_elts += mult * nz;
4299 nc_elts += mult * nc;
4300 elt_count += mult * count;
4306 if (!initializer_zerop (value))
4312 nz_elts += mult * TREE_STRING_LENGTH (value);
4313 elt_count += mult * TREE_STRING_LENGTH (value);
4317 if (!initializer_zerop (TREE_REALPART (value)))
4319 if (!initializer_zerop (TREE_IMAGPART (value)))
4327 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4329 if (!initializer_zerop (TREE_VALUE (v)))
4339 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4345 *p_nz_elts += nz_elts;
4346 *p_nc_elts += nc_elts;
4347 *p_elt_count += elt_count;
4351 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4352 HOST_WIDE_INT *p_nc_elts,
4353 HOST_WIDE_INT *p_elt_count)
4358 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count);
4361 /* Count the number of scalars in TYPE. Return -1 on overflow or
4365 count_type_elements (tree type)
4367 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4368 switch (TREE_CODE (type))
4372 tree telts = array_type_nelts (type);
4373 if (telts && host_integerp (telts, 1))
4375 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4376 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4379 else if (max / n > m)
4387 HOST_WIDE_INT n = 0, t;
4390 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4391 if (TREE_CODE (f) == FIELD_DECL)
4393 t = count_type_elements (TREE_TYPE (f));
4403 case QUAL_UNION_TYPE:
4405 /* Ho hum. How in the world do we guess here? Clearly it isn't
4406 right to count the fields. Guess based on the number of words. */
4407 HOST_WIDE_INT n = int_size_in_bytes (type);
4410 return n / UNITS_PER_WORD;
4417 return TYPE_VECTOR_SUBPARTS (type);
4426 case REFERENCE_TYPE:
4439 /* Return 1 if EXP contains mostly (3/4) zeros. */
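/* E.g. (illustrative) "int v[8] = { [3] = 5 };" yields a CONSTRUCTOR
   with one nonzero element out of eight scalars; 1 < 8/4, so we
   return 1 and callers prefer to clear the whole object and store
   only the nonzero parts.  */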
4442 mostly_zeros_p (tree exp)
4444 if (TREE_CODE (exp) == CONSTRUCTOR)
4447 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4449 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count);
4450 elts = count_type_elements (TREE_TYPE (exp));
4452 return nz_elts < elts / 4;
4455 return initializer_zerop (exp);
4458 /* Helper function for store_constructor.
4459 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4460 TYPE is the type of the CONSTRUCTOR, not the element type.
4461 CLEARED is as for store_constructor.
4462 ALIAS_SET is the alias set to use for any stores.
4464 This provides a recursive shortcut back to store_constructor when it isn't
4465 necessary to go through store_field. This is so that we can pass through
4466 the cleared field to let store_constructor know that we may not have to
4467 clear a substructure if the outer structure has already been cleared. */
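/* E.g. (hypothetical) for "struct o { struct i x; } v = { { 0, 1 } };"
   where all of V has already been cleared, passing CLEARED down lets
   the recursive store_constructor for v.x store only the 1 instead of
   clearing the substructure again.  */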
4470 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4471 HOST_WIDE_INT bitpos, enum machine_mode mode,
4472 tree exp, tree type, int cleared, int alias_set)
4474 if (TREE_CODE (exp) == CONSTRUCTOR
4475 /* We can only call store_constructor recursively if the size and
4476 bit position are on a byte boundary. */
4477 && bitpos % BITS_PER_UNIT == 0
4478 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4479 /* If we have a nonzero bitpos for a register target, then we just
4480 let store_field do the bitfield handling. This is unlikely to
4481 generate unnecessary clear instructions anyway. */
4482 && (bitpos == 0 || MEM_P (target)))
4486 = adjust_address (target,
4487 GET_MODE (target) == BLKmode
4489 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4490 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4493 /* Update the alias set, if required. */
4494 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4495 && MEM_ALIAS_SET (target) != 0)
4497 target = copy_rtx (target);
4498 set_mem_alias_set (target, alias_set);
4501 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4504 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4507 /* Store the value of constructor EXP into the rtx TARGET.
4508 TARGET is either a REG or a MEM; we know it cannot conflict, since
4509 safe_from_p has been called.
4510 CLEARED is true if TARGET is known to have been zero'd.
4511 SIZE is the number of bytes of TARGET we are allowed to modify: this
4512 may not be the same as the size of EXP if we are assigning to a field
4513 which has been packed to exclude padding bits. */
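/* Sketch of the strategy on a made-up example: for
   "int a[100] = { [0] = 7 };" mostly_zeros_p holds, so we emit one
   clear_storage call covering the whole array followed by a single
   element store, rather than a hundred element stores.  */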
4516 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4518 tree type = TREE_TYPE (exp);
4519 #ifdef WORD_REGISTER_OPERATIONS
4520 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4523 switch (TREE_CODE (type))
4527 case QUAL_UNION_TYPE:
4531 /* If size is zero or the target is already cleared, do nothing. */
4532 if (size == 0 || cleared)
4534 /* We either clear the aggregate or indicate the value is dead. */
4535 else if ((TREE_CODE (type) == UNION_TYPE
4536 || TREE_CODE (type) == QUAL_UNION_TYPE)
4537 && ! CONSTRUCTOR_ELTS (exp))
4538 /* If the constructor is empty, clear the union. */
4540 clear_storage (target, expr_size (exp));
4544 /* If we are building a static constructor into a register,
4545 set the initial value as zero so we can fold the value into
4546 a constant. But if more than one register is involved,
4547 this probably loses. */
4548 else if (REG_P (target) && TREE_STATIC (exp)
4549 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4551 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4555 /* If the constructor has fewer fields than the structure or
4556 if we are initializing the structure to mostly zeros, clear
4557 the whole structure first. Don't do this if TARGET is a
4558 register whose mode size isn't equal to SIZE since
4559 clear_storage can't handle this case. */
4561 && ((list_length (CONSTRUCTOR_ELTS (exp))
4562 != fields_length (type))
4563 || mostly_zeros_p (exp))
4565 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4568 clear_storage (target, GEN_INT (size));
4573 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4575 /* Store each element of the constructor into the
4576 corresponding field of TARGET. */
4578 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4580 tree field = TREE_PURPOSE (elt);
4581 tree value = TREE_VALUE (elt);
4582 enum machine_mode mode;
4583 HOST_WIDE_INT bitsize;
4584 HOST_WIDE_INT bitpos = 0;
4586 rtx to_rtx = target;
4588 /* Just ignore missing fields. We cleared the whole
4589 structure, above, if any fields are missing. */
4593 if (cleared && initializer_zerop (value))
4596 if (host_integerp (DECL_SIZE (field), 1))
4597 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4601 mode = DECL_MODE (field);
4602 if (DECL_BIT_FIELD (field))
4605 offset = DECL_FIELD_OFFSET (field);
4606 if (host_integerp (offset, 0)
4607 && host_integerp (bit_position (field), 0))
4609 bitpos = int_bit_position (field);
4613 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4620 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4621 make_tree (TREE_TYPE (exp),
4624 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4625 gcc_assert (MEM_P (to_rtx));
4627 #ifdef POINTERS_EXTEND_UNSIGNED
4628 if (GET_MODE (offset_rtx) != Pmode)
4629 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4631 if (GET_MODE (offset_rtx) != ptr_mode)
4632 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4635 to_rtx = offset_address (to_rtx, offset_rtx,
4636 highest_pow2_factor (offset));
4639 #ifdef WORD_REGISTER_OPERATIONS
4640 /* If this initializes a field that is smaller than a
4641 word, at the start of a word, try to widen it to a full
4642 word. This special case allows us to output C++ member
4643 function initializations in a form that the optimizers
4646 && bitsize < BITS_PER_WORD
4647 && bitpos % BITS_PER_WORD == 0
4648 && GET_MODE_CLASS (mode) == MODE_INT
4649 && TREE_CODE (value) == INTEGER_CST
4651 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4653 tree type = TREE_TYPE (value);
4655 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4657 type = lang_hooks.types.type_for_size
4658 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4659 value = convert (type, value);
4662 if (BYTES_BIG_ENDIAN)
4664 = fold (build2 (LSHIFT_EXPR, type, value,
4665 build_int_cst (NULL_TREE,
4666 BITS_PER_WORD - bitsize)));
4667 bitsize = BITS_PER_WORD;
4672 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4673 && DECL_NONADDRESSABLE_P (field))
4675 to_rtx = copy_rtx (to_rtx);
4676 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4679 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4680 value, type, cleared,
4681 get_alias_set (TREE_TYPE (field)));
4691 tree elttype = TREE_TYPE (type);
4693 HOST_WIDE_INT minelt = 0;
4694 HOST_WIDE_INT maxelt = 0;
4696 domain = TYPE_DOMAIN (type);
4697 const_bounds_p = (TYPE_MIN_VALUE (domain)
4698 && TYPE_MAX_VALUE (domain)
4699 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4700 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4702 /* If we have constant bounds for the range of the type, get them. */
4705 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4706 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4709 /* If the constructor has fewer elements than the array, clear
4710 the whole array first. Similarly if this is a static
4711 constructor of a non-BLKmode object. */
4714 else if (REG_P (target) && TREE_STATIC (exp))
4718 HOST_WIDE_INT count = 0, zero_count = 0;
4719 need_to_clear = ! const_bounds_p;
4721 /* This loop is a more accurate version of the loop in
4722 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4723 is also needed to check for missing elements. */
4724 for (elt = CONSTRUCTOR_ELTS (exp);
4725 elt != NULL_TREE && ! need_to_clear;
4726 elt = TREE_CHAIN (elt))
4728 tree index = TREE_PURPOSE (elt);
4729 HOST_WIDE_INT this_node_count;
4731 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4733 tree lo_index = TREE_OPERAND (index, 0);
4734 tree hi_index = TREE_OPERAND (index, 1);
4736 if (! host_integerp (lo_index, 1)
4737 || ! host_integerp (hi_index, 1))
4743 this_node_count = (tree_low_cst (hi_index, 1)
4744 - tree_low_cst (lo_index, 1) + 1);
4747 this_node_count = 1;
4749 count += this_node_count;
4750 if (mostly_zeros_p (TREE_VALUE (elt)))
4751 zero_count += this_node_count;
4754 /* Clear the entire array first if there are any missing
4755 elements, or if the incidence of zero elements is >=
4758 && (count < maxelt - minelt + 1
4759 || 4 * zero_count >= 3 * count))
4763 if (need_to_clear && size > 0)
4766 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4768 clear_storage (target, GEN_INT (size));
4772 if (!cleared && REG_P (target))
4773 /* Inform later passes that the old value is dead. */
4774 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4776 /* Store each element of the constructor into the
4777 corresponding element of TARGET, determined by counting the
4779 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4781 elt = TREE_CHAIN (elt), i++)
4783 enum machine_mode mode;
4784 HOST_WIDE_INT bitsize;
4785 HOST_WIDE_INT bitpos;
4787 tree value = TREE_VALUE (elt);
4788 tree index = TREE_PURPOSE (elt);
4789 rtx xtarget = target;
4791 if (cleared && initializer_zerop (value))
4794 unsignedp = TYPE_UNSIGNED (elttype);
4795 mode = TYPE_MODE (elttype);
4796 if (mode == BLKmode)
4797 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4798 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4801 bitsize = GET_MODE_BITSIZE (mode);
4803 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4805 tree lo_index = TREE_OPERAND (index, 0);
4806 tree hi_index = TREE_OPERAND (index, 1);
4807 rtx index_r, pos_rtx;
4808 HOST_WIDE_INT lo, hi, count;
4811 /* If the range is constant and "small", unroll the loop. */
4813 && host_integerp (lo_index, 0)
4814 && host_integerp (hi_index, 0)
4815 && (lo = tree_low_cst (lo_index, 0),
4816 hi = tree_low_cst (hi_index, 0),
4817 count = hi - lo + 1,
4820 || (host_integerp (TYPE_SIZE (elttype), 1)
4821 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4824 lo -= minelt; hi -= minelt;
4825 for (; lo <= hi; lo++)
4827 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4830 && !MEM_KEEP_ALIAS_SET_P (target)
4831 && TREE_CODE (type) == ARRAY_TYPE
4832 && TYPE_NONALIASED_COMPONENT (type))
4834 target = copy_rtx (target);
4835 MEM_KEEP_ALIAS_SET_P (target) = 1;
4838 store_constructor_field
4839 (target, bitsize, bitpos, mode, value, type, cleared,
4840 get_alias_set (elttype));
4845 rtx loop_start = gen_label_rtx ();
4846 rtx loop_end = gen_label_rtx ();
4849 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4850 unsignedp = TYPE_UNSIGNED (domain);
4852 index = build_decl (VAR_DECL, NULL_TREE, domain);
4855 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4857 SET_DECL_RTL (index, index_r);
4858 store_expr (lo_index, index_r, 0);
4860 /* Build the head of the loop. */
4861 do_pending_stack_adjust ();
4862 emit_label (loop_start);
4864 /* Assign value to element index. */
4866 = convert (ssizetype,
4867 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4868 index, TYPE_MIN_VALUE (domain))));
4869 position = size_binop (MULT_EXPR, position,
4871 TYPE_SIZE_UNIT (elttype)));
4873 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4874 xtarget = offset_address (target, pos_rtx,
4875 highest_pow2_factor (position));
4876 xtarget = adjust_address (xtarget, mode, 0);
4877 if (TREE_CODE (value) == CONSTRUCTOR)
4878 store_constructor (value, xtarget, cleared,
4879 bitsize / BITS_PER_UNIT);
4881 store_expr (value, xtarget, 0);
4883 /* Generate a conditional jump to exit the loop. */
4884 exit_cond = build2 (LT_EXPR, integer_type_node,
4886 jumpif (exit_cond, loop_end);
4888 /* Update the loop counter, and jump to the head of
4890 expand_assignment (index,
4891 build2 (PLUS_EXPR, TREE_TYPE (index),
4892 index, integer_one_node));
4894 emit_jump (loop_start);
4896 /* Build the end of the loop. */
4897 emit_label (loop_end);
4900 else if ((index != 0 && ! host_integerp (index, 0))
4901 || ! host_integerp (TYPE_SIZE (elttype), 1))
4906 index = ssize_int (1);
4909 index = fold_convert (ssizetype,
4910 fold (build2 (MINUS_EXPR,
4913 TYPE_MIN_VALUE (domain))));
4915 position = size_binop (MULT_EXPR, index,
4917 TYPE_SIZE_UNIT (elttype)));
4918 xtarget = offset_address (target,
4919 expand_expr (position, 0, VOIDmode, 0),
4920 highest_pow2_factor (position));
4921 xtarget = adjust_address (xtarget, mode, 0);
4922 store_expr (value, xtarget, 0);
4927 bitpos = ((tree_low_cst (index, 0) - minelt)
4928 * tree_low_cst (TYPE_SIZE (elttype), 1));
4930 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4932 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4933 && TREE_CODE (type) == ARRAY_TYPE
4934 && TYPE_NONALIASED_COMPONENT (type))
4936 target = copy_rtx (target);
4937 MEM_KEEP_ALIAS_SET_P (target) = 1;
4939 store_constructor_field (target, bitsize, bitpos, mode, value,
4940 type, cleared, get_alias_set (elttype));
4952 tree elttype = TREE_TYPE (type);
4953 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4954 enum machine_mode eltmode = TYPE_MODE (elttype);
4955 HOST_WIDE_INT bitsize;
4956 HOST_WIDE_INT bitpos;
4960 gcc_assert (eltmode != BLKmode);
4962 n_elts = TYPE_VECTOR_SUBPARTS (type);
4963 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4965 enum machine_mode mode = GET_MODE (target);
4967 icode = (int) vec_init_optab->handlers[mode].insn_code;
4968 if (icode != CODE_FOR_nothing)
4972 vector = alloca (n_elts);
4973 for (i = 0; i < n_elts; i++)
4974 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4978 /* If the constructor has fewer elements than the vector,
4979 clear the whole vector first. Similarly if this is a static
4980 constructor of a non-BLKmode object. */
4983 else if (REG_P (target) && TREE_STATIC (exp))
4987 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4989 for (elt = CONSTRUCTOR_ELTS (exp);
4991 elt = TREE_CHAIN (elt))
4993 int n_elts_here = tree_low_cst
4994 (int_const_binop (TRUNC_DIV_EXPR,
4995 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4996 TYPE_SIZE (elttype), 0), 1);
4998 count += n_elts_here;
4999 if (mostly_zeros_p (TREE_VALUE (elt)))
5000 zero_count += n_elts_here;
5003 /* Clear the entire vector first if there are any missing elements,
5004 or if the incidence of zero elements is >= 75%. */
5005 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5008 if (need_to_clear && size > 0 && !vector)
5011 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5013 clear_storage (target, GEN_INT (size));
5017 if (!cleared && REG_P (target))
5018 /* Inform later passes that the old value is dead. */
5019 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5021 /* Store each element of the constructor into the corresponding
5022 element of TARGET, determined by counting the elements. */
5023 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5025 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
5027 tree value = TREE_VALUE (elt);
5028 tree index = TREE_PURPOSE (elt);
5029 HOST_WIDE_INT eltpos;
5031 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5032 if (cleared && initializer_zerop (value))
5036 eltpos = tree_low_cst (index, 1);
5042 /* Vector CONSTRUCTORs should only be built from smaller
5043 vectors in the case of BLKmode vectors. */
5044 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5045 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5049 enum machine_mode value_mode =
5050 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5051 ? TYPE_MODE (TREE_TYPE (value))
5053 bitpos = eltpos * elt_size;
5054 store_constructor_field (target, bitsize, bitpos,
5055 value_mode, value, type,
5056 cleared, get_alias_set (elttype));
5061 emit_insn (GEN_FCN (icode)
5063 gen_rtx_PARALLEL (GET_MODE (target),
5064 gen_rtvec_v (n_elts, vector))));
5073 /* Store the value of EXP (an expression tree)
5074 into a subfield of TARGET which has mode MODE and occupies
5075 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5076 If MODE is VOIDmode, it means that we are storing into a bit-field.
5078 Always return const0_rtx unless we have something particular to
5081 TYPE is the type of the underlying object,
5083 ALIAS_SET is the alias set for the destination. This value will
5084 (in general) be different from that for TARGET, since TARGET is a
5085 reference to the containing structure. */
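/* E.g. (illustrative) an assignment to "s.f", with F declared
   "int f : 5", arrives here with MODE == VOIDmode, BITSIZE == 5 and
   BITPOS the bit offset of F within S, and is handled by the
   store_bit_field path below.  */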
5088 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5089 enum machine_mode mode, tree exp, tree type, int alias_set)
5091 HOST_WIDE_INT width_mask = 0;
5093 if (TREE_CODE (exp) == ERROR_MARK)
5096 /* If we have nothing to store, do nothing unless the expression has
5099 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5100 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5101 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5103 /* If we are storing into an unaligned field of an aligned union that is
5104 in a register, we may have the mode of TARGET being an integer mode but
5105 MODE == BLKmode. In that case, get an aligned object whose size and
5106 alignment are the same as TARGET and store TARGET into it (we can avoid
5107 the store if the field being stored is the entire width of TARGET). Then
5108 call ourselves recursively to store the field into a BLKmode version of
5109 that object. Finally, load from the object into TARGET. This is not
5110 very efficient in general, but should only be slightly more expensive
5111 than the otherwise-required unaligned accesses. Perhaps this can be
5112 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5113 twice, once with emit_move_insn and once via store_field. */
5116 && (REG_P (target) || GET_CODE (target) == SUBREG))
5118 rtx object = assign_temp (type, 0, 1, 1);
5119 rtx blk_object = adjust_address (object, BLKmode, 0);
5121 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5122 emit_move_insn (object, target);
5124 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5126 emit_move_insn (target, object);
5128 /* We want to return the BLKmode version of the data. */
5132 if (GET_CODE (target) == CONCAT)
5134 /* We're storing into a struct containing a single __complex. */
5136 gcc_assert (!bitpos);
5137 return store_expr (exp, target, 0);
5140 /* If the structure is in a register or if the component
5141 is a bit field, we cannot use addressing to access it.
5142 Use bit-field techniques or SUBREG to store in it. */
5144 if (mode == VOIDmode
5145 || (mode != BLKmode && ! direct_store[(int) mode]
5146 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5147 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5149 || GET_CODE (target) == SUBREG
5150 /* If the field isn't aligned enough to store as an ordinary memref,
5151 store it as a bit field. */
5153 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5154 || bitpos % GET_MODE_ALIGNMENT (mode))
5155 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5156 || (bitpos % BITS_PER_UNIT != 0)))
5157 /* If the RHS and field are a constant size and the size of the
5158 RHS isn't the same size as the bitfield, we must use bitfield
5161 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5162 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5164 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5166 /* If BITSIZE is narrower than the size of the type of EXP
5167 we will be narrowing TEMP. Normally, what's wanted are the
5168 low-order bits. However, if EXP's type is a record and this is
5169 a big-endian machine, we want the upper BITSIZE bits. */
5170 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5171 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5172 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5173 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5174 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5178 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5180 if (mode != VOIDmode && mode != BLKmode
5181 && mode != TYPE_MODE (TREE_TYPE (exp)))
5182 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5184 /* If the modes of TARGET and TEMP are both BLKmode, both
5185 must be in memory and BITPOS must be aligned on a byte
5186 boundary. If so, we simply do a block copy. */
5187 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5189 gcc_assert (MEM_P (target) && MEM_P (temp)
5190 && !(bitpos % BITS_PER_UNIT));
5192 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5193 emit_block_move (target, temp,
5194 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5201 /* Store the value in the bitfield. */
5202 store_bit_field (target, bitsize, bitpos, mode, temp);
5208 /* Now build a reference to just the desired component. */
5209 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5211 if (to_rtx == target)
5212 to_rtx = copy_rtx (to_rtx);
5214 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5215 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5216 set_mem_alias_set (to_rtx, alias_set);
5218 return store_expr (exp, to_rtx, 0);
5222 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5223 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5224 codes and find the ultimate containing object, which we return.
5226 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5227 bit position, and *PUNSIGNEDP to the signedness of the field.
5228 If the position of the field is variable, we store a tree
5229 giving the variable offset (in units) in *POFFSET.
5230 This offset is in addition to the bit position.
5231 If the position is not variable, we store 0 in *POFFSET.
5233 If any of the extraction expressions is volatile,
5234 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5236 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5237 is a mode that can be used to access the field. In that case, *PBITSIZE
5240 If the field describes a variable-sized object, *PMODE is set to
5241 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5242 this case, but the address of the object can be found. */
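/* Worked example (values illustrative only): for "s.a[i].f" where F
   is a 3-bit field, the returned object is S, *PBITSIZE == 3,
   *POFFSET is a tree for roughly "i * sizeof (s.a[0])", *PBITPOS
   holds the remaining constant bit offset, and *PMODE == VOIDmode
   since F is a bit-field.  */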
5245 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5246 HOST_WIDE_INT *pbitpos, tree *poffset,
5247 enum machine_mode *pmode, int *punsignedp,
5251 enum machine_mode mode = VOIDmode;
5252 tree offset = size_zero_node;
5253 tree bit_offset = bitsize_zero_node;
5256 /* First get the mode, signedness, and size. We do this from just the
5257 outermost expression. */
5258 if (TREE_CODE (exp) == COMPONENT_REF)
5260 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5261 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5262 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5264 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5266 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5268 size_tree = TREE_OPERAND (exp, 1);
5269 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5273 mode = TYPE_MODE (TREE_TYPE (exp));
5274 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5276 if (mode == BLKmode)
5277 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5279 *pbitsize = GET_MODE_BITSIZE (mode);
5284 if (! host_integerp (size_tree, 1))
5285 mode = BLKmode, *pbitsize = -1;
5287 *pbitsize = tree_low_cst (size_tree, 1);
5290 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5291 and find the ultimate containing object. */
5294 switch (TREE_CODE (exp))
5297 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5298 TREE_OPERAND (exp, 2));
5303 tree field = TREE_OPERAND (exp, 1);
5304 tree this_offset = component_ref_field_offset (exp);
5306 /* If this field hasn't been filled in yet, don't go past it.
5307 This should only happen when folding expressions made during
5308 type construction. */
5309 if (this_offset == 0)
5312 offset = size_binop (PLUS_EXPR, offset, this_offset);
5313 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5314 DECL_FIELD_BIT_OFFSET (field));
5316 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5321 case ARRAY_RANGE_REF:
5323 tree index = TREE_OPERAND (exp, 1);
5324 tree low_bound = array_ref_low_bound (exp);
5325 tree unit_size = array_ref_element_size (exp);
5327 /* We assume all arrays have sizes that are a multiple of a byte.
5328 First subtract the lower bound, if any, in the type of the
5329 index, then convert to sizetype and multiply by the size of
5330 the array element. */
5331 if (! integer_zerop (low_bound))
5332 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5335 offset = size_binop (PLUS_EXPR, offset,
5336 size_binop (MULT_EXPR,
5337 convert (sizetype, index),
5346 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5347 bitsize_int (*pbitsize));
5350 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5351 conversions that don't change the mode, and all view conversions
5352 except those that need to "step up" the alignment. */
5354 case VIEW_CONVERT_EXPR:
5355 if ((TYPE_ALIGN (TREE_TYPE (exp))
5356 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5358 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5359 < BIGGEST_ALIGNMENT)
5360 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5361 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5369 /* If any reference in the chain is volatile, the effect is volatile. */
5370 if (TREE_THIS_VOLATILE (exp))
5373 exp = TREE_OPERAND (exp, 0);
5377 /* If OFFSET is constant, see if we can return the whole thing as a
5378 constant bit position. Otherwise, split it up. */
5379 if (host_integerp (offset, 0)
5380 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5382 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5383 && host_integerp (tem, 0))
5384 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5386 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5392 /* Return a tree of sizetype representing the size, in bytes, of the element
5393 of EXP, an ARRAY_REF. */
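/* E.g. for an ARRAY_REF of "double a[n]" with no operand 3, this is
   just TYPE_SIZE_UNIT of the element type, i.e. size_int (8) on
   targets where doubles occupy 8 bytes (illustrative).  */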
5396 array_ref_element_size (tree exp)
5398 tree aligned_size = TREE_OPERAND (exp, 3);
5399 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5401 /* If a size was specified in the ARRAY_REF, it's the size measured
5402 in alignment units of the element type. So multiply by that value. */
5405 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5406 sizetype from another type of the same width and signedness. */
5407 if (TREE_TYPE (aligned_size) != sizetype)
5408 aligned_size = fold_convert (sizetype, aligned_size);
5409 return size_binop (MULT_EXPR, aligned_size,
5410 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5413 /* Otherwise, take the size from that of the element type. Substitute
5414 any PLACEHOLDER_EXPR that we have. */
5416 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5419 /* Return a tree representing the lower bound of the array mentioned in
5420 EXP, an ARRAY_REF. */
5423 array_ref_low_bound (tree exp)
5425 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5427 /* If a lower bound is specified in EXP, use it. */
5428 if (TREE_OPERAND (exp, 2))
5429 return TREE_OPERAND (exp, 2);
5431 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5432 substituting for a PLACEHOLDER_EXPR as needed. */
5433 if (domain_type && TYPE_MIN_VALUE (domain_type))
5434 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5436 /* Otherwise, return a zero of the appropriate type. */
5437 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5440 /* Return a tree representing the upper bound of the array mentioned in
5441 EXP, an ARRAY_REF. */
5444 array_ref_up_bound (tree exp)
5446 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5448 /* If there is a domain type and it has an upper bound, use it, substituting
5449 for a PLACEHOLDER_EXPR as needed. */
5450 if (domain_type && TYPE_MAX_VALUE (domain_type))
5451 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5453 /* Otherwise fail. */
5457 /* Return a tree representing the offset, in bytes, of the field referenced
5458 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5461 component_ref_field_offset (tree exp)
5463 tree aligned_offset = TREE_OPERAND (exp, 2);
5464 tree field = TREE_OPERAND (exp, 1);
5466 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5467 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5471 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5472 sizetype from another type of the same width and signedness. */
5473 if (TREE_TYPE (aligned_offset) != sizetype)
5474 aligned_offset = fold_convert (sizetype, aligned_offset);
5475 return size_binop (MULT_EXPR, aligned_offset,
5476 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5479 /* Otherwise, take the offset from that of the field. Substitute
5480 any PLACEHOLDER_EXPR that we have. */
5482 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5485 /* Return 1 if T is an expression that get_inner_reference handles. */
5488 handled_component_p (tree t)
5490 switch (TREE_CODE (t))
5495 case ARRAY_RANGE_REF:
5496 case VIEW_CONVERT_EXPR:
5506 /* Given an rtx VALUE that may contain additions and multiplications, return
5507 an equivalent value that just refers to a register, memory, or constant.
5508 This is done by generating instructions to perform the arithmetic and
5509 returning a pseudo-register containing the value.
5511 The returned value may be a REG, SUBREG, MEM or constant. */
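/* E.g. (illustrative) given VALUE == (plus (mult (reg 60)
   (const_int 4)) (reg 61)), we emit the multiply and the add and
   return a pseudo register holding the sum; a VALUE that is already
   a REG, MEM or constant is returned unchanged.  */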
5514 force_operand (rtx value, rtx target)
5517 /* Use subtarget as the target for operand 0 of a binary operation. */
5518 rtx subtarget = get_subtarget (target);
5519 enum rtx_code code = GET_CODE (value);
5521 /* Check for subreg applied to an expression produced by loop optimizer. */
5523 && !REG_P (SUBREG_REG (value))
5524 && !MEM_P (SUBREG_REG (value)))
5526 value = simplify_gen_subreg (GET_MODE (value),
5527 force_reg (GET_MODE (SUBREG_REG (value)),
5528 force_operand (SUBREG_REG (value),
5530 GET_MODE (SUBREG_REG (value)),
5531 SUBREG_BYTE (value));
5532 code = GET_CODE (value);
5535 /* Check for a PIC address load. */
5536 if ((code == PLUS || code == MINUS)
5537 && XEXP (value, 0) == pic_offset_table_rtx
5538 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5539 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5540 || GET_CODE (XEXP (value, 1)) == CONST))
5543 subtarget = gen_reg_rtx (GET_MODE (value));
5544 emit_move_insn (subtarget, value);
5548 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5551 target = gen_reg_rtx (GET_MODE (value));
5552 convert_move (target, force_operand (XEXP (value, 0), NULL),
5553 code == ZERO_EXTEND);
5557 if (ARITHMETIC_P (value))
5559 op2 = XEXP (value, 1);
5560 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5562 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5565 op2 = negate_rtx (GET_MODE (value), op2);
5568 /* Check for an addition with OP2 a constant integer and our first
5569 operand a PLUS of a virtual register and something else. In that
5570 case, we want to emit the sum of the virtual register and the
5571 constant first and then add the other value. This allows virtual
5572 register instantiation to simply modify the constant rather than
5573 creating another one around this addition. */
5574 if (code == PLUS && GET_CODE (op2) == CONST_INT
5575 && GET_CODE (XEXP (value, 0)) == PLUS
5576 && REG_P (XEXP (XEXP (value, 0), 0))
5577 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5578 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5580 rtx temp = expand_simple_binop (GET_MODE (value), code,
5581 XEXP (XEXP (value, 0), 0), op2,
5582 subtarget, 0, OPTAB_LIB_WIDEN);
5583 return expand_simple_binop (GET_MODE (value), code, temp,
5584 force_operand (XEXP (XEXP (value,
5586 target, 0, OPTAB_LIB_WIDEN);
5589 op1 = force_operand (XEXP (value, 0), subtarget);
5590 op2 = force_operand (op2, NULL_RTX);
5594 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5596 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5597 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5598 target, 1, OPTAB_LIB_WIDEN);
5600 return expand_divmod (0,
5601 FLOAT_MODE_P (GET_MODE (value))
5602 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5603 GET_MODE (value), op1, op2, target, 0);
5606 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5610 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5614 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5618 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5619 target, 0, OPTAB_LIB_WIDEN);
5622 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5623 target, 1, OPTAB_LIB_WIDEN);
5626 if (UNARY_P (value))
5628 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5629 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5632 #ifdef INSN_SCHEDULING
5633 /* On machines that have insn scheduling, we want all memory references to be
5634 explicit, so we need to deal with such paradoxical SUBREGs. */
5635 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5636 && (GET_MODE_SIZE (GET_MODE (value))
5637 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5639 = simplify_gen_subreg (GET_MODE (value),
5640 force_reg (GET_MODE (SUBREG_REG (value)),
5641 force_operand (SUBREG_REG (value),
5643 GET_MODE (SUBREG_REG (value)),
5644 SUBREG_BYTE (value));
5650 /* Subroutine of expand_expr: return nonzero iff there is no way that
5651 EXP can reference X, which is being modified. TOP_P is nonzero if this
5652 call is going to be used to determine whether we need a temporary
5653 for EXP, as opposed to a recursive call to this function.
5655 It is always safe for this routine to return zero since it merely
5656 searches for optimization opportunities. */
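/* E.g. expand_operands below asks whether TARGET is safe from EXP1
   before computing operand 0 directly into TARGET; if we return 0,
   TARGET is simply not used as a subtarget, so returning 0 is always
   conservative (illustrative).  */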
5659 safe_from_p (rtx x, tree exp, int top_p)
5665 /* If EXP has varying size, we MUST use a target since we currently
5666 have no way of allocating temporaries of variable size
5667 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5668 So we assume here that something at a higher level has prevented a
5669 clash. This is somewhat bogus, but the best we can do. Only
5670 do this when X is BLKmode and when we are at the top level. */
5671 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5672 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5673 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5674 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5675 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5677 && GET_MODE (x) == BLKmode)
5678 /* If X is in the outgoing argument area, it is always safe. */
5680 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5681 || (GET_CODE (XEXP (x, 0)) == PLUS
5682 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5685 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5686 find the underlying pseudo. */
5687 if (GET_CODE (x) == SUBREG)
5690 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5694 /* Now look at our tree code and possibly recurse. */
5695 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5697 case tcc_declaration:
5698 exp_rtl = DECL_RTL_IF_SET (exp);
5704 case tcc_exceptional:
5705 if (TREE_CODE (exp) == TREE_LIST)
5709 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5711 exp = TREE_CHAIN (exp);
5714 if (TREE_CODE (exp) != TREE_LIST)
5715 return safe_from_p (x, exp, 0);
5718 else if (TREE_CODE (exp) == ERROR_MARK)
5719 return 1; /* An already-visited SAVE_EXPR? */
5724 /* The only case we look at here is the DECL_INITIAL inside a
5726 return (TREE_CODE (exp) != DECL_EXPR
5727 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5728 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5729 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5732 case tcc_comparison:
5733 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5738 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5740 case tcc_expression:
5742 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5743 the expression. If it is set, we conflict iff we are that rtx or
5744 both are in memory. Otherwise, we check all operands of the
5745 expression recursively. */
5747 switch (TREE_CODE (exp))
5750 /* If the operand is static or we are static, we can't conflict.
5751 Likewise if we don't conflict with the operand at all. */
5752 if (staticp (TREE_OPERAND (exp, 0))
5753 || TREE_STATIC (exp)
5754 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5757 /* Otherwise, the only way this can conflict is if we are taking
5758 the address of a DECL whose address is part of X, which is
5760 exp = TREE_OPERAND (exp, 0);
5763 if (!DECL_RTL_SET_P (exp)
5764 || !MEM_P (DECL_RTL (exp)))
5767 exp_rtl = XEXP (DECL_RTL (exp), 0);
5771 case MISALIGNED_INDIRECT_REF:
5772 case ALIGN_INDIRECT_REF:
5775 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5776 get_alias_set (exp)))
5781 /* Assume that the call will clobber all hard registers and
5783 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5788 case WITH_CLEANUP_EXPR:
5789 case CLEANUP_POINT_EXPR:
5790 /* Lowered by gimplify.c. */
5794 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5800 /* If we have an rtx, we do not need to scan our operands. */
5804 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5805 for (i = 0; i < nops; i++)
5806 if (TREE_OPERAND (exp, i) != 0
5807 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5810 /* If this is a language-specific tree code, it may require
5811 special handling. */
5812 if ((unsigned int) TREE_CODE (exp)
5813 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5814 && !lang_hooks.safe_from_p (x, exp))
5819 /* Should never get a type here. */
5823 /* If we have an rtl, find any enclosed object. Then see if we conflict
5827 if (GET_CODE (exp_rtl) == SUBREG)
5829 exp_rtl = SUBREG_REG (exp_rtl);
5831 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5835 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5836 are memory and they conflict. */
5837 return ! (rtx_equal_p (x, exp_rtl)
5838 || (MEM_P (x) && MEM_P (exp_rtl)
5839 && true_dependence (exp_rtl, VOIDmode, x,
5840 rtx_addr_varies_p)));
5843 /* If we reach here, it is safe. */
5848 /* Return the highest power of two that EXP is known to be a multiple of.
5849 This is used in updating alignment of MEMs in array references. */
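/* Worked example: for the tree "i * 4 + 8", the MULT_EXPR multiplies
   its operands' factors (1 * 4 == 4), the INTEGER_CST 8 contributes
   its lowest set bit (8), and the PLUS_EXPR takes MIN (4, 8), so the
   whole expression is known to be a multiple of 4.  */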
5851 static unsigned HOST_WIDE_INT
5852 highest_pow2_factor (tree exp)
5854 unsigned HOST_WIDE_INT c0, c1;
5856 switch (TREE_CODE (exp))
5859 /* We can find the lowest bit that's a one. If the low
5860 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5861 We need to handle this case since we can find it in a COND_EXPR,
5862 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5863 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5865 if (TREE_CONSTANT_OVERFLOW (exp))
5866 return BIGGEST_ALIGNMENT;
5869 /* Note: tree_low_cst is intentionally not used here;
5870 we don't care about the upper bits. */
5871 c0 = TREE_INT_CST_LOW (exp);
5873 return c0 ? c0 : BIGGEST_ALIGNMENT;
5877 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5878 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5879 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5880 return MIN (c0, c1);
5883 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5884 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5887 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5889 if (integer_pow2p (TREE_OPERAND (exp, 1))
5890 && host_integerp (TREE_OPERAND (exp, 1), 1))
5892 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5893 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5894 return MAX (1, c0 / c1);
5898 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5900 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5903 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5906 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5907 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5908 return MIN (c0, c1);
5917 /* Similar, except that the alignment requirements of TARGET are
5918 taken into account. Assume it is at least as aligned as its
5919 type, unless it is a COMPONENT_REF in which case the layout of
5920 the structure gives the alignment. */
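/* E.g. (illustrative) if EXP's own factor is only 2 but TARGET is a
   COMPONENT_REF whose field is 8-byte aligned, the result is
   MAX (2, 8) == 8.  */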
5922 static unsigned HOST_WIDE_INT
5923 highest_pow2_factor_for_target (tree target, tree exp)
5925 unsigned HOST_WIDE_INT target_align, factor;
5927 factor = highest_pow2_factor (exp);
5928 if (TREE_CODE (target) == COMPONENT_REF)
5929 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5931 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5932 return MAX (factor, target_align);
5935 /* Expands variable VAR. */
5938 expand_var (tree var)
5940 if (DECL_EXTERNAL (var))
5943 if (TREE_STATIC (var))
5944 /* If this is an inlined copy of a static local variable,
5945 look up the original decl. */
5946 var = DECL_ORIGIN (var);
5948 if (TREE_STATIC (var)
5949 ? !TREE_ASM_WRITTEN (var)
5950 : !DECL_RTL_SET_P (var))
5952 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5953 /* Should be ignored. */;
5954 else if (lang_hooks.expand_decl (var))
5956 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5958 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5959 rest_of_decl_compilation (var, 0, 0);
5961 /* No expansion needed. */
5962 gcc_assert (TREE_CODE (var) == TYPE_DECL
5963 || TREE_CODE (var) == CONST_DECL
5964 || TREE_CODE (var) == FUNCTION_DECL
5965 || TREE_CODE (var) == LABEL_DECL);
5969 /* Subroutine of expand_expr. Expand the two operands of a binary
5970 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5971 The value may be stored in TARGET if TARGET is nonzero. The
5972 MODIFIER argument is as documented by expand_expr. */
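/* E.g. (illustrative) for "a + a" the two operand trees compare
   equal, so we expand the tree once and copy_rtx the result into
   *OP1 rather than expanding it twice.  */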
5975 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5976 enum expand_modifier modifier)
5978 if (! safe_from_p (target, exp1, 1))
5980 if (operand_equal_p (exp0, exp1, 0))
5982 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5983 *op1 = copy_rtx (*op0);
5987 /* If we need to preserve evaluation order, copy exp0 into its own
5988 temporary variable so that it can't be clobbered by exp1. */
5989 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5990 exp0 = save_expr (exp0);
5991 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5992 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
5997 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
5998 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
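/* E.g. (illustrative) "&s.f" recurses on the inner object S and then
   folds in F's constant byte offset with plus_constant; a variable
   ARRAY_REF index instead goes through the OFFSET path below.  */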
6001 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6002 enum expand_modifier modifier)
6004 rtx result, subtarget;
6006 HOST_WIDE_INT bitsize, bitpos;
6007 int volatilep, unsignedp;
6008 enum machine_mode mode1;
6010 /* If we are taking the address of a constant and are at the top level,
6011 we have to use output_constant_def since we can't call force_const_mem
6013 /* ??? This should be considered a front-end bug. We should not be
6014 generating ADDR_EXPR of something that isn't an LVALUE. The only
6015 exception here is STRING_CST. */
6016 if (TREE_CODE (exp) == CONSTRUCTOR
6017 || CONSTANT_CLASS_P (exp))
6018 return XEXP (output_constant_def (exp, 0), 0);
6020 /* Everything must be something allowed by is_gimple_addressable. */
6021 switch (TREE_CODE (exp))
6024 /* This case will happen via recursion for &a->b. */
6025 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6028 /* Recurse and make the output_constant_def clause above handle this. */
6029 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6033 /* The real part of the complex number is always first, therefore
6034 the address is the same as the address of the parent object. */
6037 inner = TREE_OPERAND (exp, 0);
6041 /* The imaginary part of the complex number is always second.
6042 The expression is therefore always offset by the size of the
6045 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6046 inner = TREE_OPERAND (exp, 0);
6050 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6051 expand_expr, as that can have various side effects; LABEL_DECLs for
6052 example, may not have their DECL_RTL set yet. Assume language
6053 specific tree nodes can be expanded in some interesting way. */
6055 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6057 result = expand_expr (exp, target, tmode,
6058 modifier == EXPAND_INITIALIZER
6059 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6061 /* If the DECL isn't in memory, then the DECL wasn't properly
6062 marked TREE_ADDRESSABLE, which will be either a front-end
6063 or a tree optimizer bug. */
6064 gcc_assert (GET_CODE (result) == MEM);
6065 result = XEXP (result, 0);
6067 /* ??? Is this needed anymore? */
6068 if (DECL_P (exp) && !TREE_USED (exp))
6070 assemble_external (exp);
6071 TREE_USED (exp) = 1;
6074 if (modifier != EXPAND_INITIALIZER
6075 && modifier != EXPAND_CONST_ADDRESS)
6076 result = force_operand (result, target);
6080 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6081 &mode1, &unsignedp, &volatilep);
6085 /* We must have made progress. */
6086 gcc_assert (inner != exp);
6088 subtarget = offset || bitpos ? NULL_RTX : target;
6089 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6095 if (modifier != EXPAND_NORMAL)
6096 result = force_operand (result, NULL);
6097 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6099 result = convert_memory_address (tmode, result);
6100 tmp = convert_memory_address (tmode, tmp);
6102 if (modifier == EXPAND_SUM)
6103 result = gen_rtx_PLUS (tmode, result, tmp);
6106 subtarget = bitpos ? NULL_RTX : target;
6107 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6108 1, OPTAB_LIB_WIDEN);
6114 /* Someone beforehand should have rejected taking the address
6115 of such an object. */
6116 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6118 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6119 if (modifier < EXPAND_SUM)
6120 result = force_operand (result, target);
6126 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6127 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6130 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6131 enum expand_modifier modifier)
6133 enum machine_mode rmode;
6136 /* Target mode of VOIDmode says "whatever's natural". */
6137 if (tmode == VOIDmode)
6138 tmode = TYPE_MODE (TREE_TYPE (exp));
6140 /* We can get called with some Weird Things if the user does silliness
6141 like "(short) &a". In that case, convert_memory_address won't do
6142 the right thing, so ignore the given target mode. */
6143 if (tmode != Pmode && tmode != ptr_mode)
6146 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6149 /* Despite expand_expr's claims about ignoring TMODE when not
6150 strictly convenient, things break if we don't honor it. Note
6151 that combined with the above, we only do this for pointer modes. */
6152 rmode = GET_MODE (result);
6153 if (rmode == VOIDmode)
6156 result = convert_memory_address (tmode, result);
6162 /* expand_expr: generate code for computing expression EXP.
6163 An rtx for the computed value is returned. The value is never null.
6164 In the case of a void EXP, const0_rtx is returned.
6166 The value may be stored in TARGET if TARGET is nonzero.
6167 TARGET is just a suggestion; callers must assume that
6168 the rtx returned may not be the same as TARGET.
6170 If TARGET is CONST0_RTX, it means that the value will be ignored.
6172 If TMODE is not VOIDmode, it suggests generating the
6173 result in mode TMODE. But this is done only when convenient.
6174 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6175 TMODE is just a suggestion; callers must assume that
6176 the rtx returned may not have mode TMODE.
6178 Note that TARGET may have neither TMODE nor MODE. In that case, it
6179 probably will not be used.
6181 If MODIFIER is EXPAND_SUM then when EXP is an addition
6182 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6183 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6184 products as above, or REG or MEM, or constant.
6185 Ordinarily in such cases we would output mul or add instructions
6186 and then return a pseudo reg containing the sum.
6188 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6189 it also marks a label as absolutely required (it can't be dead).
6190 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6191 This is used for outputting expressions used in initializers.
6193 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6194 with a constant address even if that address is not normally legitimate.
6195 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6197 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6198 a call parameter. Such targets require special care as we haven't yet
6199 marked TARGET so that it's safe from being trashed by libcalls. We
6200 don't want to use TARGET for anything but the final result;
6201 Intermediate values must go elsewhere. Additionally, calls to
6202 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6204 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6205 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6206 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6207 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on recursively.
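/* Editor's illustrative sketch (not in the original source): under
   EXPAND_SUM, an address computation such as &arr[i] may come back as
   the bare sum

       (plus:SI (mult:SI (reg:SI i) (const_int 4))
                (symbol_ref:SI "arr"))

   instead of being forced into a pseudo, so the caller can fold it
   into an addressing mode.  The RTL shown assumes a 32-bit target and
   4-byte array elements.  */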
6210 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6211 enum expand_modifier, rtx *);
6214 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6215 enum expand_modifier modifier, rtx *alt_rtl)
6218 rtx ret, last = NULL;
6220 /* Handle ERROR_MARK before anybody tries to access its type. */
6221 if (TREE_CODE (exp) == ERROR_MARK
6222 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6224 ret = CONST0_RTX (tmode);
6225 return ret ? ret : const0_rtx;
6228 if (flag_non_call_exceptions)
6230 rn = lookup_stmt_eh_region (exp);
6231 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6233 last = get_last_insn ();
6236 /* If this is an expression of some kind and it has an associated line
6237 number, then emit the line number before expanding the expression.
6239 We need to save and restore the file and line information so that
6240 errors discovered during expansion are emitted with the right
6241 information. It would be better if the diagnostic routines
6242 used the file/line information embedded in the tree nodes rather than globals. */
6244 if (cfun && EXPR_HAS_LOCATION (exp))
6246 location_t saved_location = input_location;
6247 input_location = EXPR_LOCATION (exp);
6248 emit_line_note (input_location);
6250 /* Record where the insns produced belong. */
6251 record_block_change (TREE_BLOCK (exp));
6253 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6255 input_location = saved_location;
6259 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6262 /* If using non-call exceptions, mark all insns that may trap.
6263 expand_call() will mark CALL_INSNs before we get to this code,
6264 but it doesn't handle libcalls, and these may trap. */
6268 for (insn = next_real_insn (last); insn;
6269 insn = next_real_insn (insn))
6271 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6272 /* If we want exceptions for non-call insns, any
6273 may_trap_p instruction may throw. */
6274 && GET_CODE (PATTERN (insn)) != CLOBBER
6275 && GET_CODE (PATTERN (insn)) != USE
6276 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6278 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6288 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6289 enum expand_modifier modifier, rtx *alt_rtl)
6292 tree type = TREE_TYPE (exp);
6294 enum machine_mode mode;
6295 enum tree_code code = TREE_CODE (exp);
6297 rtx subtarget, original_target;
6300 bool reduce_bit_field = false;
6301 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6302 ? reduce_to_bit_field_precision ((expr), \
6307 mode = TYPE_MODE (type);
6308 unsignedp = TYPE_UNSIGNED (type);
6309 if (lang_hooks.reduce_bit_field_operations
6310 && TREE_CODE (type) == INTEGER_TYPE
6311 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6313 /* An operation in what may be a bit-field type needs the
6314 result to be reduced to the precision of the bit-field type,
6315 which is narrower than that of the type's mode. */
6316 reduce_bit_field = true;
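/* Editor's example (not in the original source) of why the reduction
   matters: in a 3-bit unsigned bit-field type, 7 + 1 computed in
   SImode yields 8; REDUCE_BIT_FIELD masks that back to 8 & 7 == 0,
   which is the value the 3-bit type requires.  */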
6317 if (modifier == EXPAND_STACK_PARM)
6321 /* Use subtarget as the target for operand 0 of a binary operation. */
6322 subtarget = get_subtarget (target);
6323 original_target = target;
6324 ignore = (target == const0_rtx
6325 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6326 || code == CONVERT_EXPR || code == COND_EXPR
6327 || code == VIEW_CONVERT_EXPR)
6328 && TREE_CODE (type) == VOID_TYPE));
6330 /* If we are going to ignore this result, we need only do something
6331 if there is a side-effect somewhere in the expression. If there
6332 is, short-circuit the most common cases here. Note that we must
6333 not call expand_expr with anything but const0_rtx in case this
6334 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6338 if (! TREE_SIDE_EFFECTS (exp))
6341 /* Ensure we reference a volatile object even if the value is ignored, but
6342 don't do this if all we are doing is taking its address. */
6343 if (TREE_THIS_VOLATILE (exp)
6344 && TREE_CODE (exp) != FUNCTION_DECL
6345 && mode != VOIDmode && mode != BLKmode
6346 && modifier != EXPAND_CONST_ADDRESS)
6348 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6350 temp = copy_to_reg (temp);
6354 if (TREE_CODE_CLASS (code) == tcc_unary
6355 || code == COMPONENT_REF || code == INDIRECT_REF)
6356 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6359 else if (TREE_CODE_CLASS (code) == tcc_binary
6360 || TREE_CODE_CLASS (code) == tcc_comparison
6361 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6363 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6364 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6367 else if (code == BIT_FIELD_REF)
6369 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6370 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6371 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6378 /* If we will do cse, generate all results into pseudo registers
6379 since 1) that allows cse to find more things
6380 and 2) otherwise cse could produce an insn the machine
6381 cannot support. An exception is a CONSTRUCTOR into a multi-word
6382 MEM: that's much more likely to be most efficient into the MEM.
6383 Another is a CALL_EXPR which must return in memory. */
6385 if (! cse_not_expected && mode != BLKmode && target
6386 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6387 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6388 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6395 tree function = decl_function_context (exp);
6397 temp = label_rtx (exp);
6398 temp = gen_rtx_LABEL_REF (Pmode, temp);
6400 if (function != current_function_decl
6402 LABEL_REF_NONLOCAL_P (temp) = 1;
6404 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6409 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6414 /* If a static var's type was incomplete when the decl was written,
6415 but the type is complete now, lay out the decl now. */
6416 if (DECL_SIZE (exp) == 0
6417 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6418 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6419 layout_decl (exp, 0);
6421 /* ... fall through ... */
6425 gcc_assert (DECL_RTL (exp));
6427 /* Ensure the variable is marked as used even if it doesn't go through
6428 a parser. If it hasn't been used yet, write out an external definition. */
6430 if (! TREE_USED (exp))
6432 assemble_external (exp);
6433 TREE_USED (exp) = 1;
6436 /* Show we haven't gotten RTL for this yet. */
6439 /* Variables inherited from containing functions should have
6440 been lowered by this point. */
6441 context = decl_function_context (exp);
6442 gcc_assert (!context
6443 || context == current_function_decl
6444 || TREE_STATIC (exp)
6445 /* ??? C++ creates functions that are not TREE_STATIC. */
6446 || TREE_CODE (exp) == FUNCTION_DECL);
6448 /* This is the case of an array whose size is to be determined
6449 from its initializer, while the initializer is still being parsed.
6452 if (MEM_P (DECL_RTL (exp))
6453 && REG_P (XEXP (DECL_RTL (exp), 0)))
6454 temp = validize_mem (DECL_RTL (exp));
6456 /* If DECL_RTL is memory, we are in the normal case and either
6457 the address is not valid or it is not a register and -fforce-addr
6458 is specified, get the address into a register. */
6460 else if (MEM_P (DECL_RTL (exp))
6461 && modifier != EXPAND_CONST_ADDRESS
6462 && modifier != EXPAND_SUM
6463 && modifier != EXPAND_INITIALIZER
6464 && (! memory_address_p (DECL_MODE (exp),
6465 XEXP (DECL_RTL (exp), 0))
6467 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6470 *alt_rtl = DECL_RTL (exp);
6471 temp = replace_equiv_address (DECL_RTL (exp),
6472 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6475 /* If we got something, return it. But first, set the alignment
6476 if the address is a register. */
6479 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6480 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6485 /* If the mode of DECL_RTL does not match that of the decl, it
6486 must be a promoted value. We return a SUBREG of the wanted mode,
6487 but mark it so that we know that it was already extended. */
6489 if (REG_P (DECL_RTL (exp))
6490 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6492 enum machine_mode pmode;
6494 /* Get the signedness used for this variable. Ensure we get the
6495 same mode we got when the variable was declared. */
6496 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6497 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6498 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6500 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6501 SUBREG_PROMOTED_VAR_P (temp) = 1;
6502 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
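/* Editor's sketch (not in the original source): on a target that
   promotes QImode locals to SImode registers, a 'char' variable held
   in (reg:SI 58) is returned here as

       (subreg:QI (reg:SI 58) 0)

   with SUBREG_PROMOTED_VAR_P set, so later code knows the upper bits
   already hold a valid extension and need not re-extend.  The
   register number is of course arbitrary.  */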
6506 return DECL_RTL (exp);
6509 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6510 TREE_INT_CST_HIGH (exp), mode);
6512 /* ??? If overflow is set, fold will have done an incomplete job,
6513 which can result in (plus xx (const_int 0)), which can get
6514 simplified by validate_replace_rtx during virtual register
6515 instantiation, which can result in unrecognizable insns.
6516 Avoid this by forcing all overflows into registers. */
6517 if (TREE_CONSTANT_OVERFLOW (exp)
6518 && modifier != EXPAND_INITIALIZER)
6519 temp = force_reg (mode, temp);
6524 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6525 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6526 return const_vector_from_tree (exp);
6528 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6529 TREE_VECTOR_CST_ELTS (exp)),
6530 ignore ? const0_rtx : target, tmode, modifier);
6533 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6536 /* If optimized, generate immediate CONST_DOUBLE
6537 which will be turned into memory by reload if necessary.
6539 We used to force a register so that loop.c could see it. But
6540 this does not allow gen_* patterns to perform optimizations with
6541 the constants. It also produces two insns in cases like "x = 1.0;".
6542 On most machines, floating-point constants are not permitted in
6543 many insns, so we'd end up copying it to a register in any case.
6545 Now, we do the copying in expand_binop, if appropriate. */
6546 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6547 TYPE_MODE (TREE_TYPE (exp)));
6550 /* Handle evaluating a complex constant in a CONCAT target. */
6551 if (original_target && GET_CODE (original_target) == CONCAT)
6553 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6556 rtarg = XEXP (original_target, 0);
6557 itarg = XEXP (original_target, 1);
6559 /* Move the real and imaginary parts separately. */
6560 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6561 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6564 emit_move_insn (rtarg, op0);
6566 emit_move_insn (itarg, op1);
6568 return original_target;
6571 /* ... fall through ... */
6574 temp = output_constant_def (exp, 1);
6576 /* temp contains a constant address.
6577 On RISC machines where a constant address isn't valid,
6578 make some insns to get that address into a register. */
6579 if (modifier != EXPAND_CONST_ADDRESS
6580 && modifier != EXPAND_INITIALIZER
6581 && modifier != EXPAND_SUM
6582 && (! memory_address_p (mode, XEXP (temp, 0))
6583 || flag_force_addr))
6584 return replace_equiv_address (temp,
6585 copy_rtx (XEXP (temp, 0)));
6590 tree val = TREE_OPERAND (exp, 0);
6591 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6593 if (!SAVE_EXPR_RESOLVED_P (exp))
6595 /* We can indeed still hit this case, typically via builtin
6596 expanders calling save_expr immediately before expanding
6597 something. Assume this means that we only have to deal
6598 with non-BLKmode values. */
6599 gcc_assert (GET_MODE (ret) != BLKmode);
6601 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6602 DECL_ARTIFICIAL (val) = 1;
6603 DECL_IGNORED_P (val) = 1;
6604 TREE_OPERAND (exp, 0) = val;
6605 SAVE_EXPR_RESOLVED_P (exp) = 1;
6607 if (!CONSTANT_P (ret))
6608 ret = copy_to_reg (ret);
6609 SET_DECL_RTL (val, ret);
6616 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6617 expand_goto (TREE_OPERAND (exp, 0));
6619 expand_computed_goto (TREE_OPERAND (exp, 0));
6623 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6629 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6630 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6635 /* All elts simple constants => refer to a constant in memory. But
6636 if this is a non-BLKmode mode, let it store a field at a time
6637 since that should make a CONST_INT or CONST_DOUBLE when we
6638 fold. Likewise, if we have a target we can use, it is best to
6639 store directly into the target unless the type is large enough
6640 that memcpy will be used. If we are making an initializer and
6641 all operands are constant, put it in memory as well.
6643 FIXME: Avoid trying to fill vector constructors piece-meal.
6644 Output them with output_constant_def below unless we're sure
6645 they're zeros. This should go away when vector initializers
6646 are treated like VECTOR_CST instead of arrays.
6648 else if ((TREE_STATIC (exp)
6649 && ((mode == BLKmode
6650 && ! (target != 0 && safe_from_p (target, exp, 1)))
6651 || TREE_ADDRESSABLE (exp)
6652 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6653 && (! MOVE_BY_PIECES_P
6654 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6656 && ! mostly_zeros_p (exp))))
6657 || ((modifier == EXPAND_INITIALIZER
6658 || modifier == EXPAND_CONST_ADDRESS)
6659 && TREE_CONSTANT (exp)))
6661 rtx constructor = output_constant_def (exp, 1);
6663 if (modifier != EXPAND_CONST_ADDRESS
6664 && modifier != EXPAND_INITIALIZER
6665 && modifier != EXPAND_SUM)
6666 constructor = validize_mem (constructor);
6672 /* Handle calls that pass values in multiple non-contiguous
6673 locations. The Irix 6 ABI has examples of this. */
6674 if (target == 0 || ! safe_from_p (target, exp, 1)
6675 || GET_CODE (target) == PARALLEL
6676 || modifier == EXPAND_STACK_PARM)
6678 = assign_temp (build_qualified_type (type,
6680 | (TREE_READONLY (exp)
6681 * TYPE_QUAL_CONST))),
6682 0, TREE_ADDRESSABLE (exp), 1);
6684 store_constructor (exp, target, 0, int_expr_size (exp));
6688 case MISALIGNED_INDIRECT_REF:
6689 case ALIGN_INDIRECT_REF:
6692 tree exp1 = TREE_OPERAND (exp, 0);
6695 if (code == MISALIGNED_INDIRECT_REF
6696 && !targetm.vectorize.misaligned_mem_ok (mode))
6699 if (modifier != EXPAND_WRITE)
6703 t = fold_read_from_constant_string (exp);
6705 return expand_expr (t, target, tmode, modifier);
6708 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6709 op0 = memory_address (mode, op0);
6711 if (code == ALIGN_INDIRECT_REF)
6713 int align = TYPE_ALIGN_UNIT (type);
6714 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6715 op0 = memory_address (mode, op0);
6718 temp = gen_rtx_MEM (mode, op0);
6720 orig = REF_ORIGINAL (exp);
6723 set_mem_attributes (temp, orig, 0);
6731 tree array = TREE_OPERAND (exp, 0);
6732 tree index = TREE_OPERAND (exp, 1);
6734 /* Fold an expression like: "foo"[2].
6735 This is not done in fold so it won't happen inside &.
6736 Don't fold if this is for wide characters since it's too
6737 difficult to do correctly and this is a very rare case. */
6739 if (modifier != EXPAND_CONST_ADDRESS
6740 && modifier != EXPAND_INITIALIZER
6741 && modifier != EXPAND_MEMORY)
6743 tree t = fold_read_from_constant_string (exp);
6746 return expand_expr (t, target, tmode, modifier);
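/* Editor's example (not in the original source): for

       c = "hello"[1];

   fold_read_from_constant_string returns the INTEGER_CST 'e', so the
   whole ARRAY_REF expands to a constant and no memory reference is
   emitted at all.  */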
6749 /* If this is a constant index into a constant array,
6750 just get the value from the array. Handle both the cases when
6751 we have an explicit constructor and when our operand is a variable
6752 that was declared const. */
6754 if (modifier != EXPAND_CONST_ADDRESS
6755 && modifier != EXPAND_INITIALIZER
6756 && modifier != EXPAND_MEMORY
6757 && TREE_CODE (array) == CONSTRUCTOR
6758 && ! TREE_SIDE_EFFECTS (array)
6759 && TREE_CODE (index) == INTEGER_CST)
6763 for (elem = CONSTRUCTOR_ELTS (array);
6764 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6765 elem = TREE_CHAIN (elem))
6768 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6769 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6773 else if (optimize >= 1
6774 && modifier != EXPAND_CONST_ADDRESS
6775 && modifier != EXPAND_INITIALIZER
6776 && modifier != EXPAND_MEMORY
6777 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6778 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6779 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6780 && targetm.binds_local_p (array))
6782 if (TREE_CODE (index) == INTEGER_CST)
6784 tree init = DECL_INITIAL (array);
6786 if (TREE_CODE (init) == CONSTRUCTOR)
6790 for (elem = CONSTRUCTOR_ELTS (init);
6792 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6793 elem = TREE_CHAIN (elem))
6796 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6797 return expand_expr (fold (TREE_VALUE (elem)), target,
6800 else if (TREE_CODE (init) == STRING_CST
6801 && 0 > compare_tree_int (index,
6802 TREE_STRING_LENGTH (init)))
6804 tree type = TREE_TYPE (TREE_TYPE (init));
6805 enum machine_mode mode = TYPE_MODE (type);
6807 if (GET_MODE_CLASS (mode) == MODE_INT
6808 && GET_MODE_SIZE (mode) == 1)
6809 return gen_int_mode (TREE_STRING_POINTER (init)
6810 [TREE_INT_CST_LOW (index)], mode);
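/* Editor's example (not in the original source): given

       static const char tab[] = "abc";
       ... tab[2] ...

   the branch above reads 'c' straight out of DECL_INITIAL at compile
   time; the binds_local_p check guards against another module
   providing a different definition.  */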
6815 goto normal_inner_ref;
6818 /* If the operand is a CONSTRUCTOR, we can just extract the
6819 appropriate field if it is present. */
6820 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6824 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6825 elt = TREE_CHAIN (elt))
6826 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6827 /* We can normally use the value of the field in the
6828 CONSTRUCTOR. However, if this is a bitfield in
6829 an integral mode that we can fit in a HOST_WIDE_INT,
6830 we must mask only the number of bits in the bitfield,
6831 since this is done implicitly by the constructor. If
6832 the bitfield does not meet either of those conditions,
6833 we can't do this optimization. */
6834 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6835 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6837 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6838 <= HOST_BITS_PER_WIDE_INT))))
6840 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6841 && modifier == EXPAND_STACK_PARM)
6843 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6844 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6846 HOST_WIDE_INT bitsize
6847 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6848 enum machine_mode imode
6849 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6851 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6853 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6854 op0 = expand_and (imode, op0, op1, target);
6859 = build_int_cst (NULL_TREE,
6860 GET_MODE_BITSIZE (imode) - bitsize);
6862 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6864 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6872 goto normal_inner_ref;
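/* Editor's example (not in the original source) for the bit-field
   masking above: extracting a 5-bit unsigned field whose CONSTRUCTOR
   value was expanded in SImode means AND-ing with (1 << 5) - 1, i.e.
   0x1f; a signed field is instead shifted left by 27 and then
   arithmetically right by 27 so the sign bit propagates.  The bit
   counts assume 32-bit SImode.  */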
6875 case ARRAY_RANGE_REF:
6878 enum machine_mode mode1;
6879 HOST_WIDE_INT bitsize, bitpos;
6882 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6883 &mode1, &unsignedp, &volatilep);
6886 /* If we got back the original object, something is wrong. Perhaps
6887 we are evaluating an expression too early. In any event, don't
6888 infinitely recurse. */
6889 gcc_assert (tem != exp);
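/* Editor's sketch (not in the original source) of what
   get_inner_reference hands back: for

       struct { int a; char b; } s;
       ... s.b ...

   TEM is 's', BITSIZE is 8, BITPOS is 32 (typical 32-bit layout),
   OFFSET is null and MODE1 is QImode.  The rest of this case turns
   that decomposition back into a MEM or a bit-field extraction.  */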
6891 /* If TEM's type is a union of variable size, pass TARGET to the inner
6892 computation, since it will need a temporary and TARGET is known
6893 to suffice. This occurs in unchecked conversion in Ada. */
6897 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6898 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6900 && modifier != EXPAND_STACK_PARM
6901 ? target : NULL_RTX),
6903 (modifier == EXPAND_INITIALIZER
6904 || modifier == EXPAND_CONST_ADDRESS
6905 || modifier == EXPAND_STACK_PARM)
6906 ? modifier : EXPAND_NORMAL);
6908 /* If this is a constant, put it into a register if it is a
6909 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6910 if (CONSTANT_P (op0))
6912 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6913 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6915 op0 = force_reg (mode, op0);
6917 op0 = validize_mem (force_const_mem (mode, op0));
6920 /* Otherwise, if this object is not in memory and we either have an
6921 offset or a BLKmode result, put it there. This case can't occur in
6922 C, but can in Ada if we have unchecked conversion of an expression
6923 from a scalar type to an array or record type or for an
6924 ARRAY_RANGE_REF whose type is BLKmode. */
6925 else if (!MEM_P (op0)
6927 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6929 tree nt = build_qualified_type (TREE_TYPE (tem),
6930 (TYPE_QUALS (TREE_TYPE (tem))
6931 | TYPE_QUAL_CONST));
6932 rtx memloc = assign_temp (nt, 1, 1, 1);
6934 emit_move_insn (memloc, op0);
6940 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6943 gcc_assert (MEM_P (op0));
6945 #ifdef POINTERS_EXTEND_UNSIGNED
6946 if (GET_MODE (offset_rtx) != Pmode)
6947 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6949 if (GET_MODE (offset_rtx) != ptr_mode)
6950 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6953 if (GET_MODE (op0) == BLKmode
6954 /* A constant address in OP0 can have VOIDmode; we must
6955 not try to call force_reg in that case. */
6956 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6958 && (bitpos % bitsize) == 0
6959 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6960 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6962 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6966 op0 = offset_address (op0, offset_rtx,
6967 highest_pow2_factor (offset));
6970 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6971 record its alignment as BIGGEST_ALIGNMENT. */
6972 if (MEM_P (op0) && bitpos == 0 && offset != 0
6973 && is_aligning_offset (offset, tem))
6974 set_mem_align (op0, BIGGEST_ALIGNMENT);
6976 /* Don't forget about volatility even if this is a bitfield. */
6977 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6979 if (op0 == orig_op0)
6980 op0 = copy_rtx (op0);
6982 MEM_VOLATILE_P (op0) = 1;
6985 /* The following code doesn't handle CONCAT.
6986 Assume only bitpos == 0 can be used for CONCAT, due to
6987 one-element arrays having the same mode as their element. */
6988 if (GET_CODE (op0) == CONCAT)
6990 gcc_assert (bitpos == 0
6991 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
6995 /* In cases where an aligned union has an unaligned object
6996 as a field, we might be extracting a BLKmode value from
6997 an integer-mode (e.g., SImode) object. Handle this case
6998 by doing the extract into an object as wide as the field
6999 (which we know to be the width of a basic mode), then
7000 storing into memory, and changing the mode to BLKmode. */
7001 if (mode1 == VOIDmode
7002 || REG_P (op0) || GET_CODE (op0) == SUBREG
7003 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7004 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7005 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7006 && modifier != EXPAND_CONST_ADDRESS
7007 && modifier != EXPAND_INITIALIZER)
7008 /* If the field isn't aligned enough to fetch as a memref,
7009 fetch it as a bit field. */
7010 || (mode1 != BLKmode
7011 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7012 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7014 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7015 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7016 && ((modifier == EXPAND_CONST_ADDRESS
7017 || modifier == EXPAND_INITIALIZER)
7019 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7020 || (bitpos % BITS_PER_UNIT != 0)))
7021 /* If the type and the field are a constant size and the
7022 size of the type isn't the same size as the bitfield,
7023 we must use bitfield operations. */
7025 && TYPE_SIZE (TREE_TYPE (exp))
7026 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7027 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7030 enum machine_mode ext_mode = mode;
7032 if (ext_mode == BLKmode
7033 && ! (target != 0 && MEM_P (op0)
7035 && bitpos % BITS_PER_UNIT == 0))
7036 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7038 if (ext_mode == BLKmode)
7041 target = assign_temp (type, 0, 1, 1);
7046 /* In this case, BITPOS must start at a byte boundary and
7047 TARGET, if specified, must be a MEM. */
7048 gcc_assert (MEM_P (op0)
7049 && (!target || MEM_P (target))
7050 && !(bitpos % BITS_PER_UNIT));
7052 emit_block_move (target,
7053 adjust_address (op0, VOIDmode,
7054 bitpos / BITS_PER_UNIT),
7055 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7057 (modifier == EXPAND_STACK_PARM
7058 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7063 op0 = validize_mem (op0);
7065 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7066 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7068 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7069 (modifier == EXPAND_STACK_PARM
7070 ? NULL_RTX : target),
7071 ext_mode, ext_mode);
7073 /* If the result is a record type and BITSIZE is narrower than
7074 the mode of OP0, an integral mode, and this is a big endian
7075 machine, we must put the field into the high-order bits. */
7076 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7077 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7078 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7079 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7080 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7084 /* If the result type is BLKmode, store the data into a temporary
7085 of the appropriate type, but with the mode corresponding to the
7086 mode for the data we have (op0's mode). It's tempting to make
7087 this a constant type, since we know it's only being stored once,
7088 but that can cause problems if we are taking the address of this
7089 COMPONENT_REF because the MEM of any reference via that address
7090 will have flags corresponding to the type, which will not
7091 necessarily be constant. */
7092 if (mode == BLKmode)
7095 = assign_stack_temp_for_type
7096 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7098 emit_move_insn (new, op0);
7099 op0 = copy_rtx (new);
7100 PUT_MODE (op0, BLKmode);
7101 set_mem_attributes (op0, exp, 1);
7107 /* If the result is BLKmode, use that to access the object now as well. */
7109 if (mode == BLKmode)
7112 /* Get a reference to just this component. */
7113 if (modifier == EXPAND_CONST_ADDRESS
7114 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7115 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7117 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7119 if (op0 == orig_op0)
7120 op0 = copy_rtx (op0);
7122 set_mem_attributes (op0, exp, 0);
7123 if (REG_P (XEXP (op0, 0)))
7124 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7126 MEM_VOLATILE_P (op0) |= volatilep;
7127 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7128 || modifier == EXPAND_CONST_ADDRESS
7129 || modifier == EXPAND_INITIALIZER)
7131 else if (target == 0)
7132 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7134 convert_move (target, op0, unsignedp);
7139 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7142 /* Check for a built-in function. */
7143 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7144 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7146 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7148 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7149 == BUILT_IN_FRONTEND)
7150 return lang_hooks.expand_expr (exp, original_target,
7154 return expand_builtin (exp, target, subtarget, tmode, ignore);
7157 return expand_call (exp, target, ignore);
7159 case NON_LVALUE_EXPR:
7162 if (TREE_OPERAND (exp, 0) == error_mark_node)
7165 if (TREE_CODE (type) == UNION_TYPE)
7167 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7169 /* If both input and output are BLKmode, this conversion isn't doing
7170 anything except possibly changing memory attributes. */
7171 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7173 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7176 result = copy_rtx (result);
7177 set_mem_attributes (result, exp, 0);
7183 if (TYPE_MODE (type) != BLKmode)
7184 target = gen_reg_rtx (TYPE_MODE (type));
7186 target = assign_temp (type, 0, 1, 1);
7190 /* Store data into beginning of memory target. */
7191 store_expr (TREE_OPERAND (exp, 0),
7192 adjust_address (target, TYPE_MODE (valtype), 0),
7193 modifier == EXPAND_STACK_PARM);
7197 gcc_assert (REG_P (target));
7199 /* Store this field into a union of the proper type. */
7200 store_field (target,
7201 MIN ((int_size_in_bytes (TREE_TYPE
7202 (TREE_OPERAND (exp, 0)))
7204 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7205 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7209 /* Return the entire union. */
7213 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7215 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7218 /* If the signedness of the conversion differs and OP0 is
7219 a promoted SUBREG, clear that indication since we now
7220 have to do the proper extension. */
7221 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7222 && GET_CODE (op0) == SUBREG)
7223 SUBREG_PROMOTED_VAR_P (op0) = 0;
7225 return REDUCE_BIT_FIELD (op0);
7228 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7229 if (GET_MODE (op0) == mode)
7232 /* If OP0 is a constant, just convert it into the proper mode. */
7233 else if (CONSTANT_P (op0))
7235 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7236 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7238 if (modifier == EXPAND_INITIALIZER)
7239 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7240 subreg_lowpart_offset (mode,
7243 op0= convert_modes (mode, inner_mode, op0,
7244 TYPE_UNSIGNED (inner_type));
7247 else if (modifier == EXPAND_INITIALIZER)
7248 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7250 else if (target == 0)
7251 op0 = convert_to_mode (mode, op0,
7252 TYPE_UNSIGNED (TREE_TYPE
7253 (TREE_OPERAND (exp, 0))));
7256 convert_move (target, op0,
7257 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7261 return REDUCE_BIT_FIELD (op0);
7263 case VIEW_CONVERT_EXPR:
7264 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7266 /* If the input and output modes are both the same, we are done.
7267 Otherwise, if neither mode is BLKmode and both are integral and within
7268 a word, we can use gen_lowpart. If neither is true, make sure the
7269 operand is in memory and convert the MEM to the new mode. */
7270 if (TYPE_MODE (type) == GET_MODE (op0))
7272 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7273 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7274 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7275 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7276 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7277 op0 = gen_lowpart (TYPE_MODE (type), op0);
7278 else if (!MEM_P (op0))
7280 /* If the operand is not a MEM, force it into memory. Since we
7281 are going to be changing the mode of the MEM, don't call
7282 force_const_mem for constants because we don't allow pool
7283 constants to change mode. */
7284 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7286 gcc_assert (!TREE_ADDRESSABLE (exp));
7288 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7290 = assign_stack_temp_for_type
7291 (TYPE_MODE (inner_type),
7292 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7294 emit_move_insn (target, op0);
7298 /* At this point, OP0 is in the correct mode. If the output type is such
7299 that the operand is known to be aligned, indicate that it is.
7300 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7304 op0 = copy_rtx (op0);
7306 if (TYPE_ALIGN_OK (type))
7307 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7308 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7309 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7311 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7312 HOST_WIDE_INT temp_size
7313 = MAX (int_size_in_bytes (inner_type),
7314 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7315 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7316 temp_size, 0, type);
7317 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7319 gcc_assert (!TREE_ADDRESSABLE (exp));
7321 if (GET_MODE (op0) == BLKmode)
7322 emit_block_move (new_with_op0_mode, op0,
7323 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7324 (modifier == EXPAND_STACK_PARM
7325 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7327 emit_move_insn (new_with_op0_mode, op0);
7332 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7338 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7339 something else, make sure we add the register to the constant and
7340 then to the other thing. This case can occur during strength
7341 reduction and doing it this way will produce better code if the
7342 frame pointer or argument pointer is eliminated.
7344 fold-const.c will ensure that the constant is always in the inner
7345 PLUS_EXPR, so the only case we need to do anything about is if
7346 sp, ap, or fp is our second argument, in which case we must swap
7347 the innermost first argument and our second argument. */
7349 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7350 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7351 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7352 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7353 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7354 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7356 tree t = TREE_OPERAND (exp, 1);
7358 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7359 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7362 /* If the result is to be ptr_mode and we are adding an integer to
7363 something, we might be forming a constant. So try to use
7364 plus_constant. If it produces a sum and we can't accept it,
7365 use force_operand. This allows P = &ARR[const] to generate
7366 efficient code on machines where a SYMBOL_REF is not a valid
7369 If this is an EXPAND_SUM call, always return the sum. */
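/* Editor's example (not in the original source): for p = &arr[3] with
   4-byte elements, plus_constant can fold the address to

       (const (plus (symbol_ref "arr") (const_int 12)))

   and only if that form is not a legitimate address does
   force_operand materialize it with an explicit add.  */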
7370 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7371 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7373 if (modifier == EXPAND_STACK_PARM)
7375 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7376 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7377 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7381 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7383 /* Use immed_double_const to ensure that the constant is
7384 truncated according to the mode of OP1, then sign extended
7385 to a HOST_WIDE_INT. Using the constant directly can result
7386 in non-canonical RTL in a 64x32 cross compile. */
7388 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7390 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7391 op1 = plus_constant (op1, INTVAL (constant_part));
7392 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7393 op1 = force_operand (op1, target);
7394 return REDUCE_BIT_FIELD (op1);
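/* Editor's example (not in the original source) of the
   canonicalization issue: on a 64-bit host targeting 32-bit code, the
   SImode constant 0xffffffff must live in a CONST_INT as -1
   (sign-extended to HOST_WIDE_INT); using the raw value 0xffffffff
   would produce non-canonical RTL that later passes refuse to
   match.  */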
7397 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7398 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7399 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7403 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7404 (modifier == EXPAND_INITIALIZER
7405 ? EXPAND_INITIALIZER : EXPAND_SUM));
7406 if (! CONSTANT_P (op0))
7408 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7409 VOIDmode, modifier);
7410 /* Return a PLUS if modifier says it's OK. */
7411 if (modifier == EXPAND_SUM
7412 || modifier == EXPAND_INITIALIZER)
7413 return simplify_gen_binary (PLUS, mode, op0, op1);
7416 /* Use immed_double_const to ensure that the constant is
7417 truncated according to the mode of OP1, then sign extended
7418 to a HOST_WIDE_INT. Using the constant directly can result
7419 in non-canonical RTL in a 64x32 cross compile. */
7421 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7423 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7424 op0 = plus_constant (op0, INTVAL (constant_part));
7425 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7426 op0 = force_operand (op0, target);
7427 return REDUCE_BIT_FIELD (op0);
7431 /* No sense saving up arithmetic to be done
7432 if it's all in the wrong mode to form part of an address.
7433 And force_operand won't know whether to sign-extend or zero-extend. */
7435 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7436 || mode != ptr_mode)
7438 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7439 subtarget, &op0, &op1, 0);
7440 if (op0 == const0_rtx)
7442 if (op1 == const0_rtx)
7447 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7448 subtarget, &op0, &op1, modifier);
7449 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7452 /* For initializers, we are allowed to return a MINUS of two
7453 symbolic constants. Here we handle all cases when both operands are constant. */
7455 /* Handle difference of two symbolic constants,
7456 for the sake of an initializer. */
7457 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7458 && really_constant_p (TREE_OPERAND (exp, 0))
7459 && really_constant_p (TREE_OPERAND (exp, 1)))
7461 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7462 NULL_RTX, &op0, &op1, modifier);
7464 /* If the last operand is a CONST_INT, use plus_constant of
7465 the negated constant. Else make the MINUS. */
7466 if (GET_CODE (op1) == CONST_INT)
7467 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7469 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7472 /* No sense saving up arithmetic to be done
7473 if it's all in the wrong mode to form part of an address.
7474 And force_operand won't know whether to sign-extend or zero-extend. */
7476 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7477 || mode != ptr_mode)
7480 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7481 subtarget, &op0, &op1, modifier);
7483 /* Convert A - const to A + (-const). */
7484 if (GET_CODE (op1) == CONST_INT)
7486 op1 = negate_rtx (mode, op1);
7487 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7493 /* If first operand is constant, swap them.
7494 Thus the following special case checks need only
7495 check the second operand. */
7496 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7498 tree t1 = TREE_OPERAND (exp, 0);
7499 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7500 TREE_OPERAND (exp, 1) = t1;
7503 /* Attempt to return something suitable for generating an
7504 indexed address, for machines that support that. */
7506 if (modifier == EXPAND_SUM && mode == ptr_mode
7507 && host_integerp (TREE_OPERAND (exp, 1), 0))
7509 tree exp1 = TREE_OPERAND (exp, 1);
7511 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7515 op0 = force_operand (op0, NULL_RTX);
7517 op0 = copy_to_mode_reg (mode, op0);
7519 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7520 gen_int_mode (tree_low_cst (exp1, 0),
7521 TYPE_MODE (TREE_TYPE (exp1)))));
7524 if (modifier == EXPAND_STACK_PARM)
7527 /* Check for multiplying things that have been extended
7528 from a narrower type. If this machine supports multiplying
7529 in that narrower type with a result in the desired type,
7530 do it that way, and avoid the explicit type-conversion. */
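/* Editor's example (not in the original source): on a target with an
   HImode x HImode -> SImode multiply pattern (mulhisi3), code such as

       short a, b;
       int prod = (int) a * (int) b;

   expands to the widening multiply directly, rather than extending
   both operands to SImode and doing a full SImode multiply.  */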
7531 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7532 && TREE_CODE (type) == INTEGER_TYPE
7533 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7534 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7535 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7536 && int_fits_type_p (TREE_OPERAND (exp, 1),
7537 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7538 /* Don't use a widening multiply if a shift will do. */
7539 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7540 > HOST_BITS_PER_WIDE_INT)
7541 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7543 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7544 && (TYPE_PRECISION (TREE_TYPE
7545 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7546 == TYPE_PRECISION (TREE_TYPE
7548 (TREE_OPERAND (exp, 0), 0))))
7549 /* If both operands are extended, they must either both
7550 be zero-extended or both be sign-extended. */
7551 && (TYPE_UNSIGNED (TREE_TYPE
7552 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7553 == TYPE_UNSIGNED (TREE_TYPE
7555 (TREE_OPERAND (exp, 0), 0)))))))
7557 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7558 enum machine_mode innermode = TYPE_MODE (op0type);
7559 bool zextend_p = TYPE_UNSIGNED (op0type);
7560 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7561 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7563 if (mode == GET_MODE_WIDER_MODE (innermode))
7565 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7567 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7568 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7569 TREE_OPERAND (exp, 1),
7570 NULL_RTX, &op0, &op1, 0);
7572 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7573 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7574 NULL_RTX, &op0, &op1, 0);
7577 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7578 && innermode == word_mode)
7581 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7582 NULL_RTX, VOIDmode, 0);
7583 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7584 op1 = convert_modes (innermode, mode,
7585 expand_expr (TREE_OPERAND (exp, 1),
7586 NULL_RTX, VOIDmode, 0),
7589 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7590 NULL_RTX, VOIDmode, 0);
7591 temp = expand_binop (mode, other_optab, op0, op1, target,
7592 unsignedp, OPTAB_LIB_WIDEN);
7593 hipart = gen_highpart (innermode, temp);
7594 htem = expand_mult_highpart_adjust (innermode, hipart,
7598 emit_move_insn (hipart, htem);
7599 return REDUCE_BIT_FIELD (temp);
7603 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7604 subtarget, &op0, &op1, 0);
7605 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7607 case TRUNC_DIV_EXPR:
7608 case FLOOR_DIV_EXPR:
7610 case ROUND_DIV_EXPR:
7611 case EXACT_DIV_EXPR:
7612 if (modifier == EXPAND_STACK_PARM)
7614 /* Possible optimization: compute the dividend with EXPAND_SUM
7615 then, if the divisor is constant, optimize the case
7616 where some terms of the dividend have coefficients divisible by it. */
7617 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7618 subtarget, &op0, &op1, 0);
7619 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7622 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving an
7623 expensive divide. If not, combine will rebuild the original computation. */
7625 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7626 && TREE_CODE (type) == REAL_TYPE
7627 && !real_onep (TREE_OPERAND (exp, 0)))
7628 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7629 build2 (RDIV_EXPR, type,
7630 build_real (type, dconst1),
7631 TREE_OPERAND (exp, 1))),
7632 target, tmode, modifier);
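/* Editor's example (not in the original source): with
   -funsafe-math-optimizations, a loop like

       for (i = 0; i < n; i++)
         y[i] = x[i] / d;

   benefits because 1.0/d is loop-invariant; CSE or invariant motion
   can hoist the reciprocal and replace n divisions with n
   multiplications.  */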
7636 case TRUNC_MOD_EXPR:
7637 case FLOOR_MOD_EXPR:
7639 case ROUND_MOD_EXPR:
7640 if (modifier == EXPAND_STACK_PARM)
7642 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7643 subtarget, &op0, &op1, 0);
7644 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7646 case FIX_ROUND_EXPR:
7647 case FIX_FLOOR_EXPR:
7649 gcc_unreachable (); /* Not used for C. */
7651 case FIX_TRUNC_EXPR:
7652 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7653 if (target == 0 || modifier == EXPAND_STACK_PARM)
7654 target = gen_reg_rtx (mode);
7655 expand_fix (target, op0, unsignedp);
7659 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7660 if (target == 0 || modifier == EXPAND_STACK_PARM)
7661 target = gen_reg_rtx (mode);
7662 /* expand_float can't figure out what to do if FROM has VOIDmode.
7663 So give it the correct mode. With -O, cse will optimize this. */
7664 if (GET_MODE (op0) == VOIDmode)
7665 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7667 expand_float (target, op0,
7668 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7672 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7673 if (modifier == EXPAND_STACK_PARM)
7675 temp = expand_unop (mode,
7676 optab_for_tree_code (NEGATE_EXPR, type),
7679 return REDUCE_BIT_FIELD (temp);
7682 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7683 if (modifier == EXPAND_STACK_PARM)
7686 /* ABS_EXPR is not valid for complex arguments. */
7687 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7688 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7690 /* Unsigned abs is simply the operand. Testing here means we don't
7691 risk generating incorrect code below. */
7692 if (TYPE_UNSIGNED (type))
7695 return expand_abs (mode, op0, target, unsignedp,
7696 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7700 target = original_target;
7702 || modifier == EXPAND_STACK_PARM
7703 || (MEM_P (target) && MEM_VOLATILE_P (target))
7704 || GET_MODE (target) != mode
7706 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7707 target = gen_reg_rtx (mode);
7708 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7709 target, &op0, &op1, 0);
7711 /* First try to do it with a special MIN or MAX instruction.
7712 If that does not win, use a conditional jump to select the proper value. */
7714 this_optab = optab_for_tree_code (code, type);
7715 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7720 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7723 if (! REG_P (target))
7724 target = gen_reg_rtx (mode);
7726 /* If op1 was placed in target, swap op0 and op1. */
7727 if (target != op0 && target == op1)
7734 /* We generate better code and avoid problems with op1 mentioning
7735 target by forcing op1 into a pseudo if it isn't a constant. */
7736 if (! CONSTANT_P (op1))
7737 op1 = force_reg (mode, op1);
7740 emit_move_insn (target, op0);
7742 op0 = gen_label_rtx ();
7744 /* If this mode is an integer too wide to compare properly,
7745 compare word by word. Rely on cse to optimize constant cases. */
7746 if (GET_MODE_CLASS (mode) == MODE_INT
7747 && ! can_compare_p (GE, mode, ccp_jump))
7749 if (code == MAX_EXPR)
7750 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7753 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7758 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7759 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7761 emit_move_insn (target, op1);
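/* Editor's sketch (not in the original source) of the fallback
   sequence emitted above for MAX_EXPR when no max instruction exists:

       target = op0;
       if (target >= op1) goto lab;    // do_compare_rtx_and_jump
       target = op1;
     lab:

   with the word-by-word jump variant used when the mode is too wide
   for a direct compare.  */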
7766 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7767 if (modifier == EXPAND_STACK_PARM)
7769 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7773 /* ??? Can optimize bitwise operations with one arg constant.
7774 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7775 and (a bitwise1 b) bitwise2 b (etc)
7776 but that is probably not worthwhile. */
7778 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7779 boolean values when we want in all cases to compute both of them. In
7780 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7781 as actual zero-or-1 values and then bitwise anding. In cases where
7782 there cannot be any side effects, better code would be made by
7783 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7784 how to recognize those cases. */
7786 case TRUTH_AND_EXPR:
7787 code = BIT_AND_EXPR;
7792 code = BIT_IOR_EXPR;
7796 case TRUTH_XOR_EXPR:
7797 code = BIT_XOR_EXPR;
7805 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7807 if (modifier == EXPAND_STACK_PARM)
7809 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7810 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7813 /* Could determine the answer when only additive constants differ. Also,
7814 the addition of one can be handled by changing the condition. */
7821 case UNORDERED_EXPR:
7829 temp = do_store_flag (exp,
7830 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7831 tmode != VOIDmode ? tmode : mode, 0);
7835 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7836 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7838 && REG_P (original_target)
7839 && (GET_MODE (original_target)
7840 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7842 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7845 /* If temp is constant, we can just compute the result. */
7846 if (GET_CODE (temp) == CONST_INT)
7848 if (INTVAL (temp) != 0)
7849 emit_move_insn (target, const1_rtx);
7851 emit_move_insn (target, const0_rtx);
7856 if (temp != original_target)
7858 enum machine_mode mode1 = GET_MODE (temp);
7859 if (mode1 == VOIDmode)
7860 mode1 = tmode != VOIDmode ? tmode : mode;
7862 temp = copy_to_mode_reg (mode1, temp);
7865 op1 = gen_label_rtx ();
7866 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7867 GET_MODE (temp), unsignedp, op1);
7868 emit_move_insn (temp, const1_rtx);
7873 /* If no set-flag instruction, must generate a conditional store
7874 into a temporary variable. Drop through and handle this like && and ||. */
7879 || modifier == EXPAND_STACK_PARM
7880 || ! safe_from_p (target, exp, 1)
7881 /* Make sure we don't have a hard reg (such as function's return
7882 value) live across basic blocks, if not optimizing. */
7883 || (!optimize && REG_P (target)
7884 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7885 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7888 emit_move_insn (target, const0_rtx);
7890 op1 = gen_label_rtx ();
7891 jumpifnot (exp, op1);
7894 emit_move_insn (target, const1_rtx);
7897 return ignore ? const0_rtx : target;
7899 case TRUTH_NOT_EXPR:
7900 if (modifier == EXPAND_STACK_PARM)
7902 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7903 /* The parser is careful to generate TRUTH_NOT_EXPR
7904 only with operands that are always zero or one. */
7905 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7906 target, 1, OPTAB_LIB_WIDEN);
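/* Editor's note (not in the original source): because the operand is
   guaranteed to be 0 or 1, logical negation is just op0 ^ 1; !x
   becomes (xor:SI (reg x) (const_int 1)) rather than a
   compare-and-set sequence.  */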
7910 case STATEMENT_LIST:
7912 tree_stmt_iterator iter;
7914 gcc_assert (ignore);
7916 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7917 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7922 /* A COND_EXPR with its type being VOID_TYPE represents a
7923 conditional jump and is handled in
7924 expand_gimple_cond_expr. */
7925 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
7927 /* Note that COND_EXPRs whose type is a structure or union
7928 are required to be constructed to contain assignments of
7929 a temporary variable, so that we can evaluate them here
7930 for side effect only. If type is void, we must do likewise. */
7932 gcc_assert (!TREE_ADDRESSABLE (type)
7934 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7935 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7937 /* If we are not to produce a result, we have no target. Otherwise,
7938 if a target was specified use it; it will not be used as an
7939 intermediate target unless it is safe. If no target, use a temporary. */
7942 if (modifier != EXPAND_STACK_PARM
7944 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7945 && GET_MODE (original_target) == mode
7946 #ifdef HAVE_conditional_move
7947 && (! can_conditionally_move_p (mode)
7948 || REG_P (original_target))
7950 && !MEM_P (original_target))
7951 temp = original_target;
7953 temp = assign_temp (type, 0, 0, 1);
7955 do_pending_stack_adjust ();
7957 op0 = gen_label_rtx ();
7958 op1 = gen_label_rtx ();
7959 jumpifnot (TREE_OPERAND (exp, 0), op0);
7960 store_expr (TREE_OPERAND (exp, 1), temp,
7961 modifier == EXPAND_STACK_PARM);
7963 emit_jump_insn (gen_jump (op1));
7966 store_expr (TREE_OPERAND (exp, 2), temp,
7967 modifier == EXPAND_STACK_PARM);
7974 target = expand_vec_cond_expr (exp, target);
7979 tree lhs = TREE_OPERAND (exp, 0);
7980 tree rhs = TREE_OPERAND (exp, 1);
7982 gcc_assert (ignore);
7984 /* Check for |= or &= of a bitfield of size one into another bitfield
7985 of size 1. In this case, (unless we need the result of the
7986 assignment) we can do this more efficiently with a
7987 test followed by an assignment, if necessary.
7989 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7990 things change so we do, this code should be enhanced to handle it. */
7992 if (TREE_CODE (lhs) == COMPONENT_REF
7993 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7994 || TREE_CODE (rhs) == BIT_AND_EXPR)
7995 && TREE_OPERAND (rhs, 0) == lhs
7996 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7997 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7998 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8000 rtx label = gen_label_rtx ();
8002 do_jump (TREE_OPERAND (rhs, 1),
8003 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8004 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8005 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8006 (TREE_CODE (rhs) == BIT_IOR_EXPR
8008 : integer_zero_node)));
8009 do_pending_stack_adjust ();
8014 expand_assignment (lhs, rhs);
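/* Editor's example (not in the original source) of the special case
   above: with

       struct { unsigned x:1, y:1; } s;
       s.x |= s.y;

   the expansion is effectively "if (s.y) s.x = 1;" -- one test and a
   one-bit store -- instead of a load/or/insert on the containing
   word.  */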
8020 if (!TREE_OPERAND (exp, 0))
8021 expand_null_return ();
8023 expand_return (TREE_OPERAND (exp, 0));
8027 return expand_expr_addr_expr (exp, target, tmode, modifier);
8030 /* Get the rtx code of the operands. */
8031 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8032 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8035 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8037 /* Move the real (op0) and imaginary (op1) parts to their location. */
8038 write_complex_part (target, op0, false);
8039 write_complex_part (target, op1, true);
8044 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8045 return read_complex_part (op0, false);
8048 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8049 return read_complex_part (op0, true);
8052 expand_resx_expr (exp);
8055 case TRY_CATCH_EXPR:
8057 case EH_FILTER_EXPR:
8058 case TRY_FINALLY_EXPR:
8059 /* Lowered by tree-eh.c. */
8062 case WITH_CLEANUP_EXPR:
8063 case CLEANUP_POINT_EXPR:
8065 case CASE_LABEL_EXPR:
8071 case PREINCREMENT_EXPR:
8072 case PREDECREMENT_EXPR:
8073 case POSTINCREMENT_EXPR:
8074 case POSTDECREMENT_EXPR:
8077 case TRUTH_ANDIF_EXPR:
8078 case TRUTH_ORIF_EXPR:
8079 /* Lowered by gimplify.c. */
8083 return get_exception_pointer (cfun);
8086 return get_exception_filter (cfun);
8089 /* Function descriptors are not valid except as
8090 initialization constants, and should not be expanded. */
8098 expand_label (TREE_OPERAND (exp, 0));
8102 expand_asm_expr (exp);
8105 case WITH_SIZE_EXPR:
8106 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8107 have pulled out the size to use in whatever context it needed. */
8108 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8111 case REALIGN_LOAD_EXPR:
8113 tree oprnd0 = TREE_OPERAND (exp, 0);
8114 tree oprnd1 = TREE_OPERAND (exp, 1);
8115 tree oprnd2 = TREE_OPERAND (exp, 2);
8118 this_optab = optab_for_tree_code (code, type);
8119 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8120 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8121 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8130 return lang_hooks.expand_expr (exp, original_target, tmode,
8134 /* Here to do an ordinary binary operator. */
8136 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8137 subtarget, &op0, &op1, 0);
8139 this_optab = optab_for_tree_code (code, type);
8141 if (modifier == EXPAND_STACK_PARM)
8143 temp = expand_binop (mode, this_optab, op0, op1, target,
8144 unsignedp, OPTAB_LIB_WIDEN);
8146 return REDUCE_BIT_FIELD (temp);
8148 #undef REDUCE_BIT_FIELD
8150 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8151 signedness of TYPE), possibly returning the result in TARGET. */
8153 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8155 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8156 if (target && GET_MODE (target) != GET_MODE (exp))
8158 if (TYPE_UNSIGNED (type))
8161 if (prec < HOST_BITS_PER_WIDE_INT)
8162 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8165 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8166 ((unsigned HOST_WIDE_INT) 1
8167 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8169 return expand_and (GET_MODE (exp), exp, mask, target);
8173 tree count = build_int_cst (NULL_TREE,
8174 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8175 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8176 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
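/* Editor's example (not in the original source): for a signed 3-bit
   type held in SImode, PREC == 3 and COUNT == 29, so the pair of
   shifts above computes (x << 29) >> 29, a sign extension from bit 2;
   the unsigned path instead masks with (1 << 3) - 1 == 7.  */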
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned to more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
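
/* Illustrative sketch, not part of the compiler source: the tree shape
   matched above typically comes from manual over-alignment such as

     char buf[SIZE + ALIGN];
     char *p = buf + ((- (long) buf) & (ALIGN - 1));

   where ALIGN (a hypothetical constant here) is a power of 2 larger
   than BIGGEST_ALIGNMENT / BITS_PER_UNIT: the offset is a NEGATE_EXPR
   of the address underneath a BIT_AND_EXPR with ALIGN - 1.  */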
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

static tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* They must be read-only, non-volatile, and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If the variable is bigger than the string literal, OFFSET must be
	 constant and inside of the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
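
/* Illustrative sketch, not part of the compiler source: the argument
   shapes this routine recognizes, written as the C expressions that
   produce the corresponding ADDR_EXPR, PLUS_EXPR and ARRAY_REF trees:

     static const char greeting[] = "hello";

     &"hello"[0]     ->  "hello", *ptr_offset == 0
     "hello" + 2     ->  "hello", *ptr_offset == 2
     &greeting[1]    ->  "hello", *ptr_offset == 1

   The variable case succeeds only because greeting is read-only, fully
   initialized, and binds locally.  */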
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;
  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);
  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;
  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
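
  /* Illustrative sketch, not part of the compiler source: the
     conversions performed above, written as C comparisons on a
     signed int x:

       x <  1    becomes   x <= 0
       x <= -1   becomes   x <  0
       x >  -1   becomes   x >= 0
       x >= 1    becomes   x >  0

     so the special cases below only need to recognize comparisons
     against zero.  */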
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
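
  /* Illustrative sketch, not part of the compiler source: the
     fold_single_bit_test transformation for a test of bit 3 of a
     C int x:

       (x & 8) != 0   becomes   (x >> 3) & 1
       (x & 8) == 0   becomes   ((x >> 3) & 1) ^ 1

     which needs no scc instruction at all.  */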
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
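
/* Illustrative sketch, not part of the compiler source: the
   set/jump/set fallback above has this C-level shape for
   target = (a < b):

     target = 1;
     if (a < b)
       goto done;
     target = 0;
   done:;

   with the two constants swapped when INVERT is set.  */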
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
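
/* Illustrative note, not part of the compiler source: with the default
   threshold, a switch needs at least four distinct case values (five
   without a casesi insn) before a dispatch table is considered, e.g.

     switch (x) { case 0: case 1: case 2: case 3: f (); }

   sits right at the casesi threshold; smaller switches are expanded as
   compare-and-branch trees.  */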

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
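
  /* Illustrative sketch, not part of the compiler source: this is the
     classic unsigned range-check trick.  For case values lo..hi,

       if (x < lo || x > hi)
	 goto default_label;

     becomes, after the lower bound has been subtracted,

       if ((unsigned) (x - lo) > (unsigned) (hi - lo))
	 goto default_label;

     because any x below lo wraps around to a large unsigned value.  */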
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
			     convert (index_type, index_expr),
			     convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
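
/* Illustrative sketch, not part of the compiler source: on a target
   with DImode arithmetic but no vector unit, a V2DI operation such as

     typedef long long v2di __attribute__ ((vector_size (16)));
     v2di add (v2di a, v2di b) { return a + b; }

   is still valid and is emulated with a pair of DImode additions
   instead of a single vector instruction.  */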
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
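
/* Illustrative sketch, not part of the compiler source: for a vector
   constant written in C as

     typedef int v4si __attribute__ ((vector_size (16)));
     static v4si c = { 1, 2 };

   the VECTOR_CST element list holds only the two explicit elements;
   the second loop above fills the remaining lanes with zero before
   the CONST_VECTOR is built.  */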

#include "gt-expr.h"