/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "function.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "recog.h"
#include "typeclass.h"
#include "ggc.h"
#include "langhooks.h"
#include "target.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
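
/* Illustrative sketch, not part of the original file: on a target where
   the stack grows downward but ARGS_GROW_DOWNWARD is undefined,
   PUSH_ARGS_REVERSED gets defined and STACK_PUSH_CODE resolves to
   PRE_DEC, so a single push amounts to a pre-decrement store through
   the stack pointer, roughly as follows.  */
#if 0
  rtx dest = gen_rtx_MEM (mode,
			  gen_rtx_PRE_DEC (Pmode, stack_pointer_rtx));
  emit_move_insn (dest, x);	/* X, MODE: the value being pushed.  */
#endif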
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
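
/* Worked example, not from the original source: assume a hypothetical
   32-bit target where MOVE_RATIO is 5 and SImode moves work at 32-bit
   alignment.  A 16-byte copy then needs move_by_pieces_ninsns == 4
   insns, so MOVE_BY_PIECES_P (16, 32) holds and the copy is expanded
   inline instead of becoming a memcpy call.  */
#if 0
  if (MOVE_BY_PIECES_P (16, 32))	/* 4 SImode moves < MOVE_RATIO.  */
    move_by_pieces (dest, src, 16, 32, 0);	/* dest, src: BLKmode MEMs.  */
#endif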
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
		  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
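
/* Usage sketch, not part of the original file: widen a QImode pseudo
   into an SImode pseudo with zero-extension.  */
#if 0
  rtx narrow = gen_reg_rtx (QImode);
  rtx wide = gen_reg_rtx (SImode);
  convert_move (wide, narrow, /*unsignedp=*/1);	/* Emits a zero_extend.  */
#endif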
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into the mode is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
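
/* Worked example, not from the original source: converting the QImode
   constant -1 (byte 0xff) to SImode as unsigned must yield 255, not -1.
   The CONST_INT path above masks VAL with ((HOST_WIDE_INT) 1 << 8) - 1
   and skips the sign-extension step because UNSIGNEDP is set.  */
#if 0
  rtx c = convert_modes (SImode, QImode, constm1_rtx, /*unsignedp=*/1);
  /* c is (const_int 255).  */
#endif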
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
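
/* Usage sketch, not part of the original file: callers typically pair
   the predicate with the expander when the length is a known constant,
   e.g. while expanding a __builtin_memcpy.  */
#if 0
  if (can_move_by_pieces (len, align))	/* LEN: constant byte count.  */
    dest_mem = move_by_pieces (dest_mem, src_mem, len, align, endp);
#endif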
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
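
/* Conceptually (illustration, not part of the original file), an 8-byte
   copy with 32-bit alignment on a 32-bit target comes out of the loop
   above as two SImode moves, i.e. the equivalent of:  */
#if 0
  ((int *) to)[0] = ((int *) from)[0];	/* SImode piece at offset 0.  */
  ((int *) to)[1] = ((int *) from)[1];	/* SImode piece at offset 4.  */
#endif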
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
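
/* Worked example, not from the original source: L == 11 bytes at 32-bit
   alignment on a 32-bit target gives 2 SImode insns (8 bytes), then
   1 HImode insn (2 bytes), then 1 QImode insn (1 byte): n_insns == 4,
   with L reduced to 0 exactly as the final assertion requires.  */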
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
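
/* Usage sketch, not part of the original file: copy 64 bytes between two
   BLKmode MEMs, letting emit_block_move pick the cheapest strategy.  */
#if 0
  emit_block_move (x, y, GEN_INT (64), BLOCK_OP_NORMAL);
#endif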
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
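
/* The RTL emitted above behaves like this C loop (illustration only;
   the copy is done a byte at a time, as the ??? comment notes):  */
#if 0
  for (i = 0; i < size; i++)
    ((char *) x)[i] = ((char *) y)[i];
#endif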
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
				   GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
2086 /* Determine whether the LEN bytes generated by CONSTFUN can be
2087 stored to memory using several move instructions. CONSTFUNDATA is
2088 a pointer which will be passed as argument in every CONSTFUN call.
2089 ALIGN is maximum alignment we can assume. Return nonzero if a
2090 call to store_by_pieces should succeed. */
2093 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2094 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2095 void *constfundata, unsigned int align)
2097 unsigned HOST_WIDE_INT l;
2098 unsigned int max_size;
2099 HOST_WIDE_INT offset = 0;
2100 enum machine_mode mode, tmode;
2101 enum insn_code icode;
2108 if (! STORE_BY_PIECES_P (len, align))
2111 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2112 if (align >= GET_MODE_ALIGNMENT (tmode))
2113 align = GET_MODE_ALIGNMENT (tmode);
2116 enum machine_mode xmode;
2118 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2120 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2121 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2122 || SLOW_UNALIGNED_ACCESS (tmode, align))
2125 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2128 /* We would first store what we can in the largest integer mode, then go to
2129 successively smaller modes. */
2132 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2137 max_size = STORE_MAX_PIECES + 1;
2138 while (max_size > 1)
2140 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2141 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2142 if (GET_MODE_SIZE (tmode) < max_size)
2145 if (mode == VOIDmode)
2148 icode = mov_optab->handlers[(int) mode].insn_code;
2149 if (icode != CODE_FOR_nothing
2150 && align >= GET_MODE_ALIGNMENT (mode))
2152 unsigned int size = GET_MODE_SIZE (mode);
2159 cst = (*constfun) (constfundata, offset, mode);
2160 if (!LEGITIMATE_CONSTANT_P (cst))
2170 max_size = GET_MODE_SIZE (mode);
2173 /* The code above should have handled everything. */
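/* Usage sketch: given a callback matching the constfun signature above
   (const_byte_fn and byte are hypothetical), a caller checks
   feasibility before committing:

     if (can_store_by_pieces (len, const_byte_fn, &byte, align))
       store_by_pieces (to, len, const_byte_fn, &byte, align, 0);

   and falls back to a library call otherwise.  */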
2180 /* Generate several move instructions to store LEN bytes generated by
2181 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2182 pointer which will be passed as argument in every CONSTFUN call.
2183 ALIGN is maximum alignment we can assume.
2184 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2185 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2186 stpcpy. */
2189 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2190 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2191 void *constfundata, unsigned int align, int endp)
2193 struct store_by_pieces data;
2197 gcc_assert (endp != 2);
2201 gcc_assert (STORE_BY_PIECES_P (len, align));
2202 data.constfun = constfun;
2203 data.constfundata = constfundata;
2206 store_by_pieces_1 (&data, align);
2211 gcc_assert (!data.reverse);
2216 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2217 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2219 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2222 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2229 to1 = adjust_address (data.to, QImode, data.offset);
2237 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2238 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2241 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2243 struct store_by_pieces data;
2248 data.constfun = clear_by_pieces_1;
2249 data.constfundata = NULL;
2252 store_by_pieces_1 (&data, align);
2255 /* Callback routine for clear_by_pieces.
2256 Return const0_rtx unconditionally. */
2259 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2260 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2261 enum machine_mode mode ATTRIBUTE_UNUSED)
2266 /* Subroutine of clear_by_pieces and store_by_pieces.
2267 Generate several move instructions to store LEN bytes of block TO. (A MEM
2268 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2271 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2272 unsigned int align ATTRIBUTE_UNUSED)
2274 rtx to_addr = XEXP (data->to, 0);
2275 unsigned int max_size = STORE_MAX_PIECES + 1;
2276 enum machine_mode mode = VOIDmode, tmode;
2277 enum insn_code icode;
2280 data->to_addr = to_addr;
2281 data->autinc_to
2282 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2283 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2285 data->explicit_inc_to = 0;
2286 data->reverse
2287 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2288 if (data->reverse)
2289 data->offset = data->len;
2291 /* If storing requires more than two move insns,
2292 copy addresses to registers (to make displacements shorter)
2293 and use post-increment if available. */
2294 if (!data->autinc_to
2295 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2297 /* Determine the main mode we'll be using. */
2298 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2299 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2300 if (GET_MODE_SIZE (tmode) < max_size)
2303 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2305 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2306 data->autinc_to = 1;
2307 data->explicit_inc_to = -1;
2310 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2311 && ! data->autinc_to)
2313 data->to_addr = copy_addr_to_reg (to_addr);
2314 data->autinc_to = 1;
2315 data->explicit_inc_to = 1;
2318 if ( !data->autinc_to && CONSTANT_P (to_addr))
2319 data->to_addr = copy_addr_to_reg (to_addr);
2322 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2323 if (align >= GET_MODE_ALIGNMENT (tmode))
2324 align = GET_MODE_ALIGNMENT (tmode);
2327 enum machine_mode xmode;
2329 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2331 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2332 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2333 || SLOW_UNALIGNED_ACCESS (tmode, align))
2336 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2339 /* First store what we can in the largest integer mode, then go to
2340 successively smaller modes. */
2342 while (max_size > 1)
2344 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2345 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2346 if (GET_MODE_SIZE (tmode) < max_size)
2349 if (mode == VOIDmode)
2352 icode = mov_optab->handlers[(int) mode].insn_code;
2353 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2354 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2356 max_size = GET_MODE_SIZE (mode);
2359 /* The code above should have handled everything. */
2360 gcc_assert (!data->len);
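/* Worked example: with STORE_MAX_PIECES == 4 and adequate alignment,
   a 7-byte store walks the loop above widest-first: one SImode store
   (4 bytes), one HImode store (2 bytes), then one QImode store.  */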
2363 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2364 with move instructions for mode MODE. GENFUN is the gen_... function
2365 to make a move insn for that mode. DATA has all the other info. */
2368 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2369 struct store_by_pieces *data)
2371 unsigned int size = GET_MODE_SIZE (mode);
2374 while (data->len >= size)
2377 data->offset -= size;
2379 if (data->autinc_to)
2380 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2383 to1 = adjust_address (data->to, mode, data->offset);
2385 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2386 emit_insn (gen_add2_insn (data->to_addr,
2387 GEN_INT (-(HOST_WIDE_INT) size)));
2389 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2390 emit_insn ((*genfun) (to1, cst));
2392 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2393 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2395 if (! data->reverse)
2396 data->offset += size;
2402 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2403 its length in bytes. */
2406 clear_storage (rtx object, rtx size)
2409 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2410 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2412 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2413 just move a zero. Otherwise, do this a piece at a time. */
2414 if (GET_MODE (object) != BLKmode
2415 && GET_CODE (size) == CONST_INT
2416 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2417 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2420 if (size == const0_rtx)
2422 else if (GET_CODE (size) == CONST_INT
2423 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2424 clear_by_pieces (object, INTVAL (size), align);
2425 else if (clear_storage_via_clrmem (object, size, align))
2428 retval = clear_storage_via_libcall (object, size);
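/* Example (sketch): clearing a 32-byte BLKmode MEM DEST with

     clear_storage (dest, GEN_INT (32));

   tries, in order, clear_by_pieces when the target likes piecewise
   stores at this size and alignment, then a clrmem pattern, then the
   memset libcall above.  */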
2434 /* A subroutine of clear_storage. Expand a clrmem pattern;
2435 return true if successful. */
2438 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2440 /* Try the most limited insn first, because there's no point
2441 including more than one in the machine description unless
2442 the more limited one has some advantage. */
2444 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2445 enum machine_mode mode;
2447 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2448 mode = GET_MODE_WIDER_MODE (mode))
2450 enum insn_code code = clrmem_optab[(int) mode];
2451 insn_operand_predicate_fn pred;
2453 if (code != CODE_FOR_nothing
2454 /* We don't need MODE to be narrower than
2455 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2456 the mode mask, as it is returned by the macro, it will
2457 definitely be less than the actual mode mask. */
2458 && ((GET_CODE (size) == CONST_INT
2459 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2460 <= (GET_MODE_MASK (mode) >> 1)))
2461 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2462 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2463 || (*pred) (object, BLKmode))
2464 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2465 || (*pred) (opalign, VOIDmode)))
2468 rtx last = get_last_insn ();
2471 op1 = convert_to_mode (mode, size, 1);
2472 pred = insn_data[(int) code].operand[1].predicate;
2473 if (pred != 0 && ! (*pred) (op1, mode))
2474 op1 = copy_to_mode_reg (mode, op1);
2476 pat = GEN_FCN ((int) code) (object, op1, opalign);
2483 delete_insns_since (last);
2490 /* A subroutine of clear_storage. Expand a call to memset.
2491 Return the return value of memset, 0 otherwise. */
2494 clear_storage_via_libcall (rtx object, rtx size)
2496 tree call_expr, arg_list, fn, object_tree, size_tree;
2497 enum machine_mode size_mode;
2500 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2501 place those pseudos into a VAR_DECL and use them later. */
2503 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2505 size_mode = TYPE_MODE (sizetype);
2506 size = convert_to_mode (size_mode, size, 1);
2507 size = copy_to_mode_reg (size_mode, size);
2509 /* It is incorrect to use the libcall calling conventions to call
2510 memset in this context. This could be a user call to memset and
2511 the user may wish to examine the return value from memset. For
2512 targets where libcalls and normal calls have different conventions
2513 for returning pointers, we could end up generating incorrect code. */
2515 object_tree = make_tree (ptr_type_node, object);
2516 size_tree = make_tree (sizetype, size);
2518 fn = clear_storage_libcall_fn (true);
2519 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2520 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2521 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2523 /* Now we have to build up the CALL_EXPR itself. */
2524 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2525 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2526 call_expr, arg_list, NULL_TREE);
2528 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2533 /* A subroutine of clear_storage_via_libcall. Create the tree node
2534 for the function we use for block clears. The first time FOR_CALL
2535 is true, we call assemble_external. */
2537 static GTY(()) tree block_clear_fn;
2540 init_block_clear_fn (const char *asmspec)
2542 if (!block_clear_fn)
2546 fn = get_identifier ("memset");
2547 args = build_function_type_list (ptr_type_node, ptr_type_node,
2548 integer_type_node, sizetype,
2551 fn = build_decl (FUNCTION_DECL, fn, args);
2552 DECL_EXTERNAL (fn) = 1;
2553 TREE_PUBLIC (fn) = 1;
2554 DECL_ARTIFICIAL (fn) = 1;
2555 TREE_NOTHROW (fn) = 1;
2557 block_clear_fn = fn;
2561 set_user_assembler_name (block_clear_fn, asmspec);
2565 clear_storage_libcall_fn (int for_call)
2567 static bool emitted_extern;
2569 if (!block_clear_fn)
2570 init_block_clear_fn (NULL);
2572 if (for_call && !emitted_extern)
2574 emitted_extern = true;
2575 make_decl_rtl (block_clear_fn);
2576 assemble_external (block_clear_fn);
2579 return block_clear_fn;
2582 /* Write to one of the components of the complex value CPLX. Write VAL to
2583 the real part if IMAG_P is false, and the imaginary part if it's true. */
2586 write_complex_part (rtx cplx, rtx val, bool imag_p)
2588 enum machine_mode cmode;
2589 enum machine_mode imode;
2592 if (GET_CODE (cplx) == CONCAT)
2594 emit_move_insn (XEXP (cplx, imag_p), val);
2598 cmode = GET_MODE (cplx);
2599 imode = GET_MODE_INNER (cmode);
2600 ibitsize = GET_MODE_BITSIZE (imode);
2602 /* If the sub-object is at least word sized, then we know that subregging
2603 will work. This special case is important, since store_bit_field
2604 wants to operate on integer modes, and there's rarely an OImode to
2605 correspond to TCmode. */
2606 if (ibitsize >= BITS_PER_WORD
2607 /* For hard regs we have exact predicates. Assume we can split
2608 the original object if it spans an even number of hard regs.
2609 This special case is important for SCmode on 64-bit platforms
2610 where the natural size of floating-point regs is 32-bit. */
2611 || (GET_CODE (cplx) == REG
2612 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2613 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2614 /* For MEMs we always try to make a "subreg", that is to adjust
2615 the MEM, because store_bit_field may generate overly
2616 convoluted RTL for sub-word fields. */
2619 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2620 imag_p ? GET_MODE_SIZE (imode) : 0);
2623 emit_move_insn (part, val);
2627 /* simplify_gen_subreg may fail for sub-word MEMs. */
2628 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2631 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
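/* Example: for an SCmode value held as a CONCAT of two SFmode pseudos,
   write_complex_part (cplx, val, true) reduces to a move into
   XEXP (cplx, 1); for an SCmode MEM it adjusts the MEM by
   GET_MODE_SIZE (SFmode) bytes and stores there.  store_bit_field is
   only the fallback for the sub-word cases noted above.  */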
2634 /* Extract one of the components of the complex value CPLX. Extract the
2635 real part if IMAG_P is false, and the imaginary part if it's true. */
2638 read_complex_part (rtx cplx, bool imag_p)
2640 enum machine_mode cmode, imode;
2643 if (GET_CODE (cplx) == CONCAT)
2644 return XEXP (cplx, imag_p);
2646 cmode = GET_MODE (cplx);
2647 imode = GET_MODE_INNER (cmode);
2648 ibitsize = GET_MODE_BITSIZE (imode);
2650 /* Special case reads from complex constants that got spilled to memory. */
2651 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2653 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2654 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2656 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2657 if (CONSTANT_CLASS_P (part))
2658 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2662 /* If the sub-object is at least word sized, then we know that subregging
2663 will work. This special case is important, since extract_bit_field
2664 wants to operate on integer modes, and there's rarely an OImode to
2665 correspond to TCmode. */
2666 if (ibitsize >= BITS_PER_WORD
2667 /* For hard regs we have exact predicates. Assume we can split
2668 the original object if it spans an even number of hard regs.
2669 This special case is important for SCmode on 64-bit platforms
2670 where the natural size of floating-point regs is 32-bit. */
2671 || (GET_CODE (cplx) == REG
2672 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2673 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0)
2674 /* For MEMs we always try to make a "subreg", that is to adjust
2675 the MEM, because extract_bit_field may generate overly
2676 convoluted RTL for sub-word fields. */
2679 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2680 imag_p ? GET_MODE_SIZE (imode) : 0);
2684 /* simplify_gen_subreg may fail for sub-word MEMs. */
2685 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2688 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2689 true, NULL_RTX, imode, imode);
2692 /* A subroutine of emit_move_via_alt_mode. Yet another lowpart generator.
2693 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2694 represented in NEW_MODE. */
2697 emit_move_change_mode (enum machine_mode new_mode,
2698 enum machine_mode old_mode, rtx x)
2702 if (reload_in_progress && MEM_P (x))
2704 /* We can't use gen_lowpart here because it may call change_address
2705 which is not appropriate if we were called when a reload was in
2706 progress. We don't have to worry about changing the address since
2707 the size in bytes is supposed to be the same. Copy the MEM to
2708 change the mode and move any substitutions from the old MEM to
2711 ret = adjust_address_nv (x, new_mode, 0);
2712 copy_replacements (x, ret);
2716 /* Note that we do want simplify_subreg's behaviour of validating
2717 that the new mode is ok for a hard register. If we were to use
2718 simplify_gen_subreg, we would create the subreg, but would
2719 probably run into the target not being able to implement it. */
2720 ret = simplify_subreg (new_mode, x, old_mode, 0);
2726 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2727 ALT_MODE instead of the operand's natural mode, MODE. CODE is the insn
2728 code for the move in ALT_MODE, and is known to be valid. Returns the
2729 instruction emitted, or NULL if X or Y cannot be represented in ALT_MODE. */
2732 emit_move_via_alt_mode (enum machine_mode alt_mode, enum machine_mode mode,
2733 enum insn_code code, rtx x, rtx y)
2735 x = emit_move_change_mode (alt_mode, mode, x);
2738 y = emit_move_change_mode (alt_mode, mode, y);
2741 return emit_insn (GEN_FCN (code) (x, y));
2744 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2745 an integer mode of the same size as MODE. Returns the instruction
2746 emitted, or NULL if such a move could not be generated. */
2749 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2751 enum machine_mode imode;
2752 enum insn_code code;
2754 /* There must exist a mode of the exact size we require. */
2755 imode = int_mode_for_mode (mode);
2756 if (imode == BLKmode)
2759 /* The target must support moves in this mode. */
2760 code = mov_optab->handlers[imode].insn_code;
2761 if (code == CODE_FOR_nothing)
2764 return emit_move_via_alt_mode (imode, mode, code, x, y);
2767 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2768 Return an equivalent MEM that does not use an auto-increment. */
2771 emit_move_resolve_push (enum machine_mode mode, rtx x)
2773 enum rtx_code code = GET_CODE (XEXP (x, 0));
2774 HOST_WIDE_INT adjust;
2777 adjust = GET_MODE_SIZE (mode);
2778 #ifdef PUSH_ROUNDING
2779 adjust = PUSH_ROUNDING (adjust);
2781 if (code == PRE_DEC || code == POST_DEC)
2784 /* Do not use anti_adjust_stack, since we don't want to update
2785 stack_pointer_delta. */
2786 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2787 GEN_INT (adjust), stack_pointer_rtx,
2788 0, OPTAB_LIB_WIDEN);
2789 if (temp != stack_pointer_rtx)
2790 emit_move_insn (stack_pointer_rtx, temp);
2796 temp = stack_pointer_rtx;
2799 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2802 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2808 return replace_equiv_address (x, temp);
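/* Example: on a downward-growing stack, resolving an SImode
   (mem (pre_dec (reg sp))) emits an explicit sp = sp - 4 and returns
   (mem (reg sp)), letting callers address the slot piecewise with no
   auto-increment side effects (assuming PUSH_ROUNDING keeps a 4-byte
   push at 4 bytes).  */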
2811 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2812 X is known to satisfy push_operand, and MODE is known to be complex.
2813 Returns the last instruction emitted. */
2816 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2818 enum machine_mode submode = GET_MODE_INNER (mode);
2821 #ifdef PUSH_ROUNDING
2822 unsigned int submodesize = GET_MODE_SIZE (submode);
2824 /* In case we output to the stack, but the size is smaller than the
2825 machine can push exactly, we need to use move instructions. */
2826 if (PUSH_ROUNDING (submodesize) != submodesize)
2828 x = emit_move_resolve_push (mode, x);
2829 return emit_move_insn (x, y);
2833 /* Note that the real part always precedes the imag part in memory
2834 regardless of machine's endianness. */
2835 switch (GET_CODE (XEXP (x, 0)))
2849 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2850 read_complex_part (y, imag_first));
2851 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2852 read_complex_part (y, !imag_first));
2855 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2856 MODE is known to be complex. Returns the last instruction emitted. */
2859 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2863 /* Need to take special care for pushes, to maintain proper ordering
2864 of the data, and possibly extra padding. */
2865 if (push_operand (x, mode))
2866 return emit_move_complex_push (mode, x, y);
2868 /* For memory to memory moves, optimal behaviour can be had with the
2869 existing block move logic. */
2870 if (MEM_P (x) && MEM_P (y))
2872 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2873 BLOCK_OP_NO_LIBCALL);
2874 return get_last_insn ();
2877 /* See if we can coerce the target into moving both values at once. */
2879 /* Not possible if the values are inherently not adjacent. */
2880 if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2882 /* Is possible if both are registers (or subregs of registers). */
2883 else if (register_operand (x, mode) && register_operand (y, mode))
2885 /* If one of the operands is a memory, and alignment constraints
2886 are friendly enough, we may be able to do combined memory operations.
2887 We do not attempt this if Y is a constant because that combination is
2888 usually better with the by-parts thing below. */
2889 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2890 && (!STRICT_ALIGNMENT
2891 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2898 rtx ret = emit_move_via_integer (mode, x, y);
2903 /* Show the output dies here. This is necessary for SUBREGs
2904 of pseudos since we cannot track their lifetimes correctly;
2905 hard regs shouldn't appear here except as return values. */
2906 if (!reload_completed && !reload_in_progress
2907 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2908 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2910 write_complex_part (x, read_complex_part (y, false), false);
2911 write_complex_part (x, read_complex_part (y, true), true);
2912 return get_last_insn ();
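/* Example: copying a 16-byte DCmode value between two pseudos first
   attempts a single TImode move via emit_move_via_integer; only if
   the target cannot move TImode does it fall back to the separate
   real/imaginary part moves above.  */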
2915 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2916 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2919 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2923 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
2926 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
2927 if (code != CODE_FOR_nothing)
2928 return emit_move_via_alt_mode (CCmode, mode, code, x, y);
2931 /* Otherwise, find the MODE_INT mode of the same width. */
2932 ret = emit_move_via_integer (mode, x, y);
2933 gcc_assert (ret != NULL);
2937 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2938 MODE is any multi-word or full-word mode that lacks a move_insn
2939 pattern. Note that you will get better code if you define such
2940 patterns, even if they must turn into multiple assembler instructions. */
2943 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
2950 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2952 /* If X is a push on the stack, do the push now and replace
2953 X with a reference to the stack pointer. */
2954 if (push_operand (x, mode))
2955 x = emit_move_resolve_push (mode, x);
2957 /* If we are in reload, see if either operand is a MEM whose address
2958 is scheduled for replacement. */
2959 if (reload_in_progress && MEM_P (x)
2960 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2961 x = replace_equiv_address_nv (x, inner);
2962 if (reload_in_progress && MEM_P (y)
2963 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2964 y = replace_equiv_address_nv (y, inner);
2968 need_clobber = false;
2970 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2973 rtx xpart = operand_subword (x, i, 1, mode);
2974 rtx ypart = operand_subword (y, i, 1, mode);
2976 /* If we can't get a part of Y, put Y into memory if it is a
2977 constant. Otherwise, force it into a register. If we still
2978 can't get a part of Y, abort. */
2979 if (ypart == 0 && CONSTANT_P (y))
2981 y = force_const_mem (mode, y);
2982 ypart = operand_subword (y, i, 1, mode);
2984 else if (ypart == 0)
2985 ypart = operand_subword_force (y, i, mode);
2987 gcc_assert (xpart && ypart);
2989 need_clobber |= (GET_CODE (xpart) == SUBREG);
2991 last_insn = emit_move_insn (xpart, ypart);
2997 /* Show the output dies here. This is necessary for SUBREGs
2998 of pseudos since we cannot track their lifetimes correctly;
2999 hard regs shouldn't appear here except as return values.
3000 We never want to emit such a clobber after reload. */
3002 && ! (reload_in_progress || reload_completed)
3003 && need_clobber != 0)
3004 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
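/* Example: a TImode move on a 32-bit target that defines no movti
   pattern reaches this function and is decomposed by the loop above
   into four SImode moves, one per word, via operand_subword.  */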
3011 /* Low level part of emit_move_insn.
3012 Called just like emit_move_insn, but assumes X and Y
3013 are basically valid. */
3016 emit_move_insn_1 (rtx x, rtx y)
3018 enum machine_mode mode = GET_MODE (x);
3019 enum insn_code code;
3021 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3023 code = mov_optab->handlers[mode].insn_code;
3024 if (code != CODE_FOR_nothing)
3025 return emit_insn (GEN_FCN (code) (x, y));
3027 /* Expand complex moves by moving real part and imag part. */
3028 if (COMPLEX_MODE_P (mode))
3029 return emit_move_complex (mode, x, y);
3031 if (GET_MODE_CLASS (mode) == MODE_CC)
3032 return emit_move_ccmode (mode, x, y);
3034 /* Try using a move pattern for the corresponding integer mode. This is
3035 only safe when simplify_subreg can convert MODE constants into integer
3036 constants. At present, it can only do this reliably if the value
3037 fits within a HOST_WIDE_INT. */
3038 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3040 rtx ret = emit_move_via_integer (mode, x, y);
3045 return emit_move_multi_word (mode, x, y);
3048 /* Generate code to copy Y into X.
3049 Both Y and X must have the same mode, except that
3050 Y can be a constant with VOIDmode.
3051 This mode cannot be BLKmode; use emit_block_move for that.
3053 Return the last instruction emitted. */
3056 emit_move_insn (rtx x, rtx y)
3058 enum machine_mode mode = GET_MODE (x);
3059 rtx y_cst = NULL_RTX;
3062 gcc_assert (mode != BLKmode
3063 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3068 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3069 && (last_insn = compress_float_constant (x, y)))
3074 if (!LEGITIMATE_CONSTANT_P (y))
3076 y = force_const_mem (mode, y);
3078 /* If the target's cannot_force_const_mem prevented the spill,
3079 assume that the target's move expanders will also take care
3080 of the non-legitimate constant. */
3086 /* If X or Y are memory references, verify that their addresses are valid
3089 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3090 && ! push_operand (x, GET_MODE (x)))
3092 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3093 x = validize_mem (x);
3096 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3098 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3099 y = validize_mem (y);
3101 gcc_assert (mode != BLKmode);
3103 last_insn = emit_move_insn_1 (x, y);
3105 if (y_cst && REG_P (x)
3106 && (set = single_set (last_insn)) != NULL_RTX
3107 && SET_DEST (set) == x
3108 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3109 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3114 /* If Y is representable exactly in a narrower mode, and the target can
3115 perform the extension directly from constant or memory, then emit the
3116 move as an extension. */
3119 compress_float_constant (rtx x, rtx y)
3121 enum machine_mode dstmode = GET_MODE (x);
3122 enum machine_mode orig_srcmode = GET_MODE (y);
3123 enum machine_mode srcmode;
3126 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3128 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3129 srcmode != orig_srcmode;
3130 srcmode = GET_MODE_WIDER_MODE (srcmode))
3133 rtx trunc_y, last_insn;
3135 /* Skip if the target can't extend this way. */
3136 ic = can_extend_p (dstmode, srcmode, 0);
3137 if (ic == CODE_FOR_nothing)
3140 /* Skip if the narrowed value isn't exact. */
3141 if (! exact_real_truncate (srcmode, &r))
3144 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3146 if (LEGITIMATE_CONSTANT_P (trunc_y))
3148 /* Skip if the target needs extra instructions to perform
3149 the conversion. */
3150 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3153 else if (float_extend_from_mem[dstmode][srcmode])
3154 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3158 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3159 last_insn = get_last_insn ();
3162 set_unique_reg_note (last_insn, REG_EQUAL, y);
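/* Example: moving the DFmode constant 1.0 into a register can become
   a load of the SFmode constant 1.0 (which truncates exactly)
   followed by an extendsfdf2, typically cheaper than materializing
   the full DFmode bit pattern.  */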
3170 /* Pushing data onto the stack. */
3172 /* Push a block of length SIZE (perhaps variable)
3173 and return an rtx to address the beginning of the block.
3174 The value may be virtual_outgoing_args_rtx.
3176 EXTRA is the number of bytes of padding to push in addition to SIZE.
3177 BELOW nonzero means this padding comes at low addresses;
3178 otherwise, the padding comes at high addresses. */
3181 push_block (rtx size, int extra, int below)
3185 size = convert_modes (Pmode, ptr_mode, size, 1);
3186 if (CONSTANT_P (size))
3187 anti_adjust_stack (plus_constant (size, extra));
3188 else if (REG_P (size) && extra == 0)
3189 anti_adjust_stack (size);
3192 temp = copy_to_mode_reg (Pmode, size);
3194 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3195 temp, 0, OPTAB_LIB_WIDEN);
3196 anti_adjust_stack (temp);
3199 #ifndef STACK_GROWS_DOWNWARD
3205 temp = virtual_outgoing_args_rtx;
3206 if (extra != 0 && below)
3207 temp = plus_constant (temp, extra);
3211 if (GET_CODE (size) == CONST_INT)
3212 temp = plus_constant (virtual_outgoing_args_rtx,
3213 -INTVAL (size) - (below ? 0 : extra));
3214 else if (extra != 0 && !below)
3215 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3216 negate_rtx (Pmode, plus_constant (size, extra)));
3218 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3219 negate_rtx (Pmode, size));
3222 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3225 #ifdef PUSH_ROUNDING
3227 /* Emit single push insn. */
3230 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3233 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3235 enum insn_code icode;
3236 insn_operand_predicate_fn pred;
3238 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3239 /* If there is a push pattern, use it. Otherwise try the old way of
3240 throwing a MEM representing the push operation to the move expander. */
3241 icode = push_optab->handlers[(int) mode].insn_code;
3242 if (icode != CODE_FOR_nothing)
3244 if (((pred = insn_data[(int) icode].operand[0].predicate)
3245 && !((*pred) (x, mode))))
3246 x = force_reg (mode, x);
3247 emit_insn (GEN_FCN (icode) (x));
3250 if (GET_MODE_SIZE (mode) == rounded_size)
3251 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3252 /* If we are to pad downward, adjust the stack pointer first and
3253 then store X into the stack location using an offset. This is
3254 because emit_move_insn does not know how to pad; it does not have
3255 such knowledge. */
3256 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3258 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3259 HOST_WIDE_INT offset;
3261 emit_move_insn (stack_pointer_rtx,
3262 expand_binop (Pmode,
3263 #ifdef STACK_GROWS_DOWNWARD
3269 GEN_INT (rounded_size),
3270 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3272 offset = (HOST_WIDE_INT) padding_size;
3273 #ifdef STACK_GROWS_DOWNWARD
3274 if (STACK_PUSH_CODE == POST_DEC)
3275 /* We have already decremented the stack pointer, so get the
3276 previous value. */
3277 offset += (HOST_WIDE_INT) rounded_size;
3279 if (STACK_PUSH_CODE == POST_INC)
3280 /* We have already incremented the stack pointer, so get the
3281 previous value. */
3282 offset -= (HOST_WIDE_INT) rounded_size;
3284 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3288 #ifdef STACK_GROWS_DOWNWARD
3289 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3290 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3291 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3293 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3294 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3295 GEN_INT (rounded_size));
3297 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3300 dest = gen_rtx_MEM (mode, dest_addr);
3304 set_mem_attributes (dest, type, 1);
3306 if (flag_optimize_sibling_calls)
3307 /* Function incoming arguments may overlap with sibling call
3308 outgoing arguments and we cannot allow reordering of reads
3309 from function arguments with stores to outgoing arguments
3310 of sibling calls. */
3311 set_mem_alias_set (dest, 0);
3313 emit_move_insn (dest, x);
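/* Worked example (editorial; downward-growing stack, downward
   padding, no POST_DEC/POST_INC adjustment): a 1-byte QImode push
   that PUSH_ROUNDING rounds to 4 bytes first drops the stack pointer
   by 4 and then stores the byte at sp + 3, leaving the three padding
   bytes at the lower addresses of the slot.  */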
3317 /* Generate code to push X onto the stack, assuming it has mode MODE and
3318 type TYPE.
3319 MODE is redundant except when X is a CONST_INT (since they don't
3320 carry mode info).
3321 SIZE is an rtx for the size of data to be copied (in bytes),
3322 needed only if X is BLKmode.
3324 ALIGN (in bits) is maximum alignment we can assume.
3326 If PARTIAL and REG are both nonzero, then copy that many of the first
3327 words of X into registers starting with REG, and push the rest of X.
3328 The amount of space pushed is decreased by PARTIAL words,
3329 rounded *down* to a multiple of PARM_BOUNDARY.
3330 REG must be a hard register in this case.
3331 If REG is zero but PARTIAL is not, take all other actions for an
3332 argument partially in registers, but do not actually load any
3333 registers.
3335 EXTRA is the amount in bytes of extra space to leave next to this arg.
3336 This is ignored if an argument block has already been allocated.
3338 On a machine that lacks real push insns, ARGS_ADDR is the address of
3339 the bottom of the argument block for this call. We use indexing off there
3340 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3341 argument block has not been preallocated.
3343 ARGS_SO_FAR is the size of args previously pushed for this call.
3345 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3346 for arguments passed in registers. If nonzero, it will be the number
3347 of bytes required. */
3350 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3351 unsigned int align, int partial, rtx reg, int extra,
3352 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3356 enum direction stack_direction
3357 #ifdef STACK_GROWS_DOWNWARD
3363 /* Decide where to pad the argument: `downward' for below,
3364 `upward' for above, or `none' for don't pad it.
3365 Default is below for small data on big-endian machines; else above. */
3366 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3368 /* Invert direction if stack is post-decrement.
3369 FIXME: why? */
3370 if (STACK_PUSH_CODE == POST_DEC)
3371 if (where_pad != none)
3372 where_pad = (where_pad == downward ? upward : downward);
3376 if (mode == BLKmode)
3378 /* Copy a block into the stack, entirely or partially. */
3381 int used = partial * UNITS_PER_WORD;
3385 if (reg && GET_CODE (reg) == PARALLEL)
3387 /* Use the size of the elt to compute offset. */
3388 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3389 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3390 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3393 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3399 /* USED is now the # of bytes we need not copy to the stack
3400 because registers will take care of them. */
3403 xinner = adjust_address (xinner, BLKmode, used);
3405 /* If the partial register-part of the arg counts in its stack size,
3406 skip the part of stack space corresponding to the registers.
3407 Otherwise, start copying to the beginning of the stack space,
3408 by setting SKIP to 0. */
3409 skip = (reg_parm_stack_space == 0) ? 0 : used;
3411 #ifdef PUSH_ROUNDING
3412 /* Do it with several push insns if that doesn't take lots of insns
3413 and if there is no difficulty with push insns that skip bytes
3414 on the stack for alignment purposes. */
3417 && GET_CODE (size) == CONST_INT
3419 && MEM_ALIGN (xinner) >= align
3420 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3421 /* Here we avoid the case of a structure whose weak alignment
3422 forces many pushes of a small amount of data,
3423 and such small pushes do rounding that causes trouble. */
3424 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3425 || align >= BIGGEST_ALIGNMENT
3426 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3427 == (align / BITS_PER_UNIT)))
3428 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3430 /* Push padding now if padding above and stack grows down,
3431 or if padding below and stack grows up.
3432 But if space already allocated, this has already been done. */
3433 if (extra && args_addr == 0
3434 && where_pad != none && where_pad != stack_direction)
3435 anti_adjust_stack (GEN_INT (extra));
3437 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3440 #endif /* PUSH_ROUNDING */
3444 /* Otherwise make space on the stack and copy the data
3445 to the address of that space. */
3447 /* Deduct words put into registers from the size we must copy. */
3450 if (GET_CODE (size) == CONST_INT)
3451 size = GEN_INT (INTVAL (size) - used);
3453 size = expand_binop (GET_MODE (size), sub_optab, size,
3454 GEN_INT (used), NULL_RTX, 0,
3458 /* Get the address of the stack space.
3459 In this case, we do not deal with EXTRA separately.
3460 A single stack adjust will do. */
3463 temp = push_block (size, extra, where_pad == downward);
3466 else if (GET_CODE (args_so_far) == CONST_INT)
3467 temp = memory_address (BLKmode,
3468 plus_constant (args_addr,
3469 skip + INTVAL (args_so_far)));
3471 temp = memory_address (BLKmode,
3472 plus_constant (gen_rtx_PLUS (Pmode,
3477 if (!ACCUMULATE_OUTGOING_ARGS)
3479 /* If the source is referenced relative to the stack pointer,
3480 copy it to another register to stabilize it. We do not need
3481 to do this if we know that we won't be changing sp. */
3483 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3484 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3485 temp = copy_to_reg (temp);
3488 target = gen_rtx_MEM (BLKmode, temp);
3490 /* We do *not* set_mem_attributes here, because incoming arguments
3491 may overlap with sibling call outgoing arguments and we cannot
3492 allow reordering of reads from function arguments with stores
3493 to outgoing arguments of sibling calls. We do, however, want
3494 to record the alignment of the stack slot. */
3495 /* ALIGN may well be better aligned than TYPE, e.g. due to
3496 PARM_BOUNDARY. Assume the caller isn't lying. */
3497 set_mem_align (target, align);
3499 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3502 else if (partial > 0)
3504 /* Scalar partly in registers. */
3506 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3509 /* # words of start of argument
3510 that we must make space for but need not store. */
3511 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3512 int args_offset = INTVAL (args_so_far);
3515 /* Push padding now if padding above and stack grows down,
3516 or if padding below and stack grows up.
3517 But if space already allocated, this has already been done. */
3518 if (extra && args_addr == 0
3519 && where_pad != none && where_pad != stack_direction)
3520 anti_adjust_stack (GEN_INT (extra));
3522 /* If we make space by pushing it, we might as well push
3523 the real data. Otherwise, we can leave OFFSET nonzero
3524 and leave the space uninitialized. */
3528 /* Now NOT_STACK gets the number of words that we don't need to
3529 allocate on the stack. */
3530 not_stack = partial - offset;
3532 /* If the partial register-part of the arg counts in its stack size,
3533 skip the part of stack space corresponding to the registers.
3534 Otherwise, start copying to the beginning of the stack space,
3535 by setting SKIP to 0. */
3536 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3538 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3539 x = validize_mem (force_const_mem (mode, x));
3541 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3542 SUBREGs of such registers are not allowed. */
3543 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3544 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3545 x = copy_to_reg (x);
3547 /* Loop over all the words allocated on the stack for this arg. */
3548 /* We can do it by words, because any scalar bigger than a word
3549 has a size a multiple of a word. */
3550 #ifndef PUSH_ARGS_REVERSED
3551 for (i = not_stack; i < size; i++)
3553 for (i = size - 1; i >= not_stack; i--)
3555 if (i >= not_stack + offset)
3556 emit_push_insn (operand_subword_force (x, i, mode),
3557 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3559 GEN_INT (args_offset + ((i - not_stack + skip)
3561 reg_parm_stack_space, alignment_pad);
3568 /* Push padding now if padding above and stack grows down,
3569 or if padding below and stack grows up.
3570 But if space already allocated, this has already been done. */
3571 if (extra && args_addr == 0
3572 && where_pad != none && where_pad != stack_direction)
3573 anti_adjust_stack (GEN_INT (extra));
3575 #ifdef PUSH_ROUNDING
3576 if (args_addr == 0 && PUSH_ARGS)
3577 emit_single_push_insn (mode, x, type);
3581 if (GET_CODE (args_so_far) == CONST_INT)
3583 = memory_address (mode,
3584 plus_constant (args_addr,
3585 INTVAL (args_so_far)));
3587 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3589 dest = gen_rtx_MEM (mode, addr);
3591 /* We do *not* set_mem_attributes here, because incoming arguments
3592 may overlap with sibling call outgoing arguments and we cannot
3593 allow reordering of reads from function arguments with stores
3594 to outgoing arguments of sibling calls. We do, however, want
3595 to record the alignment of the stack slot. */
3596 /* ALIGN may well be better aligned than TYPE, e.g. due to
3597 PARM_BOUNDARY. Assume the caller isn't lying. */
3598 set_mem_align (dest, align);
3600 emit_move_insn (dest, x);
3604 /* If part should go in registers, copy that part
3605 into the appropriate registers. Do this now, at the end,
3606 since mem-to-mem copies above may do function calls. */
3607 if (partial > 0 && reg != 0)
3609 /* Handle calls that pass values in multiple non-contiguous locations.
3610 The Irix 6 ABI has examples of this. */
3611 if (GET_CODE (reg) == PARALLEL)
3612 emit_group_load (reg, x, type, -1);
3614 move_block_to_reg (REGNO (reg), x, partial, mode);
3617 if (extra && args_addr == 0 && where_pad == stack_direction)
3618 anti_adjust_stack (GEN_INT (extra));
3620 if (alignment_pad && args_addr == 0)
3621 anti_adjust_stack (alignment_pad);
3624 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3625 operations. */
3628 get_subtarget (rtx x)
3632 /* Only registers can be subtargets. */
3634 /* Don't use hard regs to avoid extending their life. */
3635 || REGNO (x) < FIRST_PSEUDO_REGISTER
3639 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3640 FIELD is a bitfield. Returns true if the optimization was successful,
3641 and there's nothing else to do. */
3644 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3645 unsigned HOST_WIDE_INT bitpos,
3646 enum machine_mode mode1, rtx str_rtx,
3649 enum machine_mode str_mode = GET_MODE (str_rtx);
3650 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3655 if (mode1 != VOIDmode
3656 || bitsize >= BITS_PER_WORD
3657 || str_bitsize > BITS_PER_WORD
3658 || TREE_SIDE_EFFECTS (to)
3659 || TREE_THIS_VOLATILE (to))
3663 if (!BINARY_CLASS_P (src)
3664 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3667 op0 = TREE_OPERAND (src, 0);
3668 op1 = TREE_OPERAND (src, 1);
3671 if (!operand_equal_p (to, op0, 0))
3674 if (MEM_P (str_rtx))
3676 unsigned HOST_WIDE_INT offset1;
3678 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3679 str_mode = word_mode;
3680 str_mode = get_best_mode (bitsize, bitpos,
3681 MEM_ALIGN (str_rtx), str_mode, 0);
3682 if (str_mode == VOIDmode)
3684 str_bitsize = GET_MODE_BITSIZE (str_mode);
3687 bitpos %= str_bitsize;
3688 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3689 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3691 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3694 /* If the bit field covers the whole REG/MEM, store_field
3695 will likely generate better code. */
3696 if (bitsize >= str_bitsize)
3699 /* We can't handle fields split across multiple entities. */
3700 if (bitpos + bitsize > str_bitsize)
3703 if (BYTES_BIG_ENDIAN)
3704 bitpos = str_bitsize - bitpos - bitsize;
3706 switch (TREE_CODE (src))
3710 /* For now, just optimize the case of the topmost bitfield
3711 where we don't need to do any masking and also
3712 1 bit bitfields where xor can be used.
3713 We might win by one instruction for the other bitfields
3714 too if insv/extv instructions aren't used, so that
3715 can be added later. */
3716 if (bitpos + bitsize != str_bitsize
3717 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3720 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3721 value = convert_modes (str_mode,
3722 TYPE_MODE (TREE_TYPE (op1)), value,
3723 TYPE_UNSIGNED (TREE_TYPE (op1)));
3725 /* We may be accessing data outside the field, which means
3726 we can alias adjacent data. */
3727 if (MEM_P (str_rtx))
3729 str_rtx = shallow_copy_rtx (str_rtx);
3730 set_mem_alias_set (str_rtx, 0);
3731 set_mem_expr (str_rtx, 0);
3734 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3735 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3737 value = expand_and (str_mode, value, const1_rtx, NULL);
3740 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3741 build_int_cst (NULL_TREE, bitpos),
3743 result = expand_binop (str_mode, binop, str_rtx,
3744 value, str_rtx, 1, OPTAB_WIDEN);
3745 if (result != str_rtx)
3746 emit_move_insn (str_rtx, result);
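/* Example: for a store like s.f += 1 where f is the topmost bitfield
   of its word, no masking is needed: the addend is shifted into the
   field's position and added directly to the containing word, since
   any carry out of the field falls off the top.  */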
3757 /* Expand an assignment that stores the value of FROM into TO. */
3760 expand_assignment (tree to, tree from)
3765 /* Don't crash if the lhs of the assignment was erroneous. */
3767 if (TREE_CODE (to) == ERROR_MARK)
3769 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3773 /* Assignment of a structure component needs special treatment
3774 if the structure component's rtx is not simply a MEM.
3775 Assignment of an array element at a constant index, and assignment of
3776 an array element in an unaligned packed structure field, has the same
3778 if (handled_component_p (to)
3779 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3781 enum machine_mode mode1;
3782 HOST_WIDE_INT bitsize, bitpos;
3790 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3791 &unsignedp, &volatilep);
3793 /* If we are going to use store_bit_field and extract_bit_field,
3794 make sure to_rtx will be safe for multiple use. */
3796 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3800 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3802 gcc_assert (MEM_P (to_rtx));
3804 #ifdef POINTERS_EXTEND_UNSIGNED
3805 if (GET_MODE (offset_rtx) != Pmode)
3806 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3808 if (GET_MODE (offset_rtx) != ptr_mode)
3809 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3812 /* A constant address in TO_RTX can have VOIDmode, we must not try
3813 to call force_reg for that case. Avoid that case. */
3815 && GET_MODE (to_rtx) == BLKmode
3816 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3818 && (bitpos % bitsize) == 0
3819 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3820 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3822 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3826 to_rtx = offset_address (to_rtx, offset_rtx,
3827 highest_pow2_factor_for_target (to,
3831 /* Handle expand_expr of a complex value returning a CONCAT. */
3832 if (GET_CODE (to_rtx) == CONCAT)
3834 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3836 gcc_assert (bitpos == 0);
3837 result = store_expr (from, to_rtx, false);
3841 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3842 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3849 /* If the field is at offset zero, we could have been given the
3850 DECL_RTX of the parent struct. Don't munge it. */
3851 to_rtx = shallow_copy_rtx (to_rtx);
3853 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3855 /* Deal with volatile and readonly fields. The former is only
3856 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3858 MEM_VOLATILE_P (to_rtx) = 1;
3859 if (component_uses_parent_alias_set (to))
3860 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3863 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3867 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3868 TREE_TYPE (tem), get_alias_set (to));
3872 preserve_temp_slots (result);
3878 /* If the rhs is a function call and its value is not an aggregate,
3879 call the function before we start to compute the lhs.
3880 This is needed for correct code for cases such as
3881 val = setjmp (buf) on machines where reference to val
3882 requires loading up part of an address in a separate insn.
3884 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3885 since it might be a promoted variable where the zero- or sign- extension
3886 needs to be done. Handling this in the normal way is safe because no
3887 computation is done before the call. */
3888 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3889 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3890 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3891 && REG_P (DECL_RTL (to))))
3896 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3898 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3900 /* Handle calls that return values in multiple non-contiguous locations.
3901 The Irix 6 ABI has examples of this. */
3902 if (GET_CODE (to_rtx) == PARALLEL)
3903 emit_group_load (to_rtx, value, TREE_TYPE (from),
3904 int_size_in_bytes (TREE_TYPE (from)));
3905 else if (GET_MODE (to_rtx) == BLKmode)
3906 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3909 if (POINTER_TYPE_P (TREE_TYPE (to)))
3910 value = convert_memory_address (GET_MODE (to_rtx), value);
3911 emit_move_insn (to_rtx, value);
3913 preserve_temp_slots (to_rtx);
3919 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3920 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3923 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3925 /* Don't move directly into a return register. */
3926 if (TREE_CODE (to) == RESULT_DECL
3927 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3932 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3934 if (GET_CODE (to_rtx) == PARALLEL)
3935 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3936 int_size_in_bytes (TREE_TYPE (from)));
3938 emit_move_insn (to_rtx, temp);
3940 preserve_temp_slots (to_rtx);
3946 /* In case we are returning the contents of an object which overlaps
3947 the place the value is being stored, use a safe function when copying
3948 a value through a pointer into a structure value return block. */
3949 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3950 && current_function_returns_struct
3951 && !current_function_returns_pcc_struct)
3956 size = expr_size (from);
3957 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3959 emit_library_call (memmove_libfunc, LCT_NORMAL,
3960 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3961 XEXP (from_rtx, 0), Pmode,
3962 convert_to_mode (TYPE_MODE (sizetype),
3963 size, TYPE_UNSIGNED (sizetype)),
3964 TYPE_MODE (sizetype));
3966 preserve_temp_slots (to_rtx);
3972 /* Compute FROM and store the value in the rtx we got. */
3975 result = store_expr (from, to_rtx, 0);
3976 preserve_temp_slots (result);
3982 /* Generate code for computing expression EXP,
3983 and storing the value into TARGET.
3985 If the mode is BLKmode then we may return TARGET itself.
3986 It turns out that in BLKmode it doesn't cause a problem,
3987 because C has no operators that could combine two different
3988 assignments into the same BLKmode object with different values
3989 with no sequence point. Will other languages need this to
3990 be more thorough?
3992 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3993 stack, and block moves may need to be treated specially. */
3996 store_expr (tree exp, rtx target, int call_param_p)
3999 rtx alt_rtl = NULL_RTX;
4000 int dont_return_target = 0;
4002 if (VOID_TYPE_P (TREE_TYPE (exp)))
4004 /* C++ can generate ?: expressions with a throw expression in one
4005 branch and an rvalue in the other. Here, we resolve attempts to
4006 store the throw expression's nonexistent result. */
4007 gcc_assert (!call_param_p);
4008 expand_expr (exp, const0_rtx, VOIDmode, 0);
4011 if (TREE_CODE (exp) == COMPOUND_EXPR)
4013 /* Perform first part of compound expression, then assign from second
4015 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4016 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4017 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4019 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4021 /* For conditional expression, get safe form of the target. Then
4022 test the condition, doing the appropriate assignment on either
4023 side. This avoids the creation of unnecessary temporaries.
4024 For non-BLKmode, it is more efficient not to do this. */
4026 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4028 do_pending_stack_adjust ();
4030 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4031 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
4032 emit_jump_insn (gen_jump (lab2));
4035 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4041 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4042 /* If this is a scalar in a register that is stored in a wider mode
4043 than the declared mode, compute the result into its declared mode
4044 and then convert to the wider mode. Our value is the computed
4045 expression. */
4047 rtx inner_target = 0;
4049 /* We can do the conversion inside EXP, which will often result
4050 in some optimizations. Do the conversion in two steps: first
4051 change the signedness, if needed, then the extend. But don't
4052 do this if the type of EXP is a subtype of something else
4053 since then the conversion might involve more than just
4054 converting modes. */
4055 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4056 && TREE_TYPE (TREE_TYPE (exp)) == 0
4057 && (!lang_hooks.reduce_bit_field_operations
4058 || (GET_MODE_PRECISION (GET_MODE (target))
4059 == TYPE_PRECISION (TREE_TYPE (exp)))))
4061 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4062 != SUBREG_PROMOTED_UNSIGNED_P (target))
4064 (lang_hooks.types.signed_or_unsigned_type
4065 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4067 exp = convert (lang_hooks.types.type_for_mode
4068 (GET_MODE (SUBREG_REG (target)),
4069 SUBREG_PROMOTED_UNSIGNED_P (target)),
4072 inner_target = SUBREG_REG (target);
4075 temp = expand_expr (exp, inner_target, VOIDmode,
4076 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4078 /* If TEMP is a VOIDmode constant, use convert_modes to make
4079 sure that we properly convert it. */
4080 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4082 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4083 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4084 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4085 GET_MODE (target), temp,
4086 SUBREG_PROMOTED_UNSIGNED_P (target));
4089 convert_move (SUBREG_REG (target), temp,
4090 SUBREG_PROMOTED_UNSIGNED_P (target));
4096 temp = expand_expr_real (exp, target, GET_MODE (target),
4097 (call_param_p
4098 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4099 &alt_rtl);
4100 /* Return TARGET if it's a specified hardware register.
4101 If TARGET is a volatile mem ref, either return TARGET
4102 or return a reg copied *from* TARGET; ANSI requires this.
4104 Otherwise, if TEMP is not TARGET, return TEMP
4105 if it is constant (for efficiency),
4106 or if we really want the correct value. */
4107 if (!(target && REG_P (target)
4108 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4109 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4110 && ! rtx_equal_p (temp, target)
4111 && CONSTANT_P (temp))
4112 dont_return_target = 1;
4115 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4116 the same as that of TARGET, adjust the constant. This is needed, for
4117 example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
4119 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4120 && TREE_CODE (exp) != ERROR_MARK
4121 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4122 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4123 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4125 /* If value was not generated in the target, store it there.
4126 Convert the value to TARGET's type first if necessary and emit the
4127 pending incrementations that have been queued when expanding EXP.
4128 Note that we cannot emit the whole queue blindly because this will
4129 effectively disable the POST_INC optimization later.
4131 If TEMP and TARGET compare equal according to rtx_equal_p, but
4132 one or both of them are volatile memory refs, we have to distinguish
     two cases:
4134 - expand_expr has used TARGET. In this case, we must not generate
4135 another copy.  This can be detected by TARGET being equal according
       to == .
4137 - expand_expr has not used TARGET - that means that the source just
4138 happens to have the same RTX form. Since temp will have been created
4139 by expand_expr, it will compare unequal according to == .
4140 We must generate a copy in this case, to reach the correct number
4141 of volatile memory references. */
4143 if ((! rtx_equal_p (temp, target)
4144 || (temp != target && (side_effects_p (temp)
4145 || side_effects_p (target))))
4146 && TREE_CODE (exp) != ERROR_MARK
4147 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4148 but TARGET is not valid memory reference, TEMP will differ
4149 from TARGET although it is really the same location. */
4150 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4151 /* If there's nothing to copy, don't bother. Don't call expr_size
4152 unless necessary, because some front ends' (C++) expr_size hook
4153 aborts on objects that are not supposed to be bit-copied or
	 bit-initialized.  */
4155 && expr_size (exp) != const0_rtx)
4157 if (GET_MODE (temp) != GET_MODE (target)
4158 && GET_MODE (temp) != VOIDmode)
4160 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4161 if (dont_return_target)
4163 /* In this case, we will return TEMP,
4164 so make sure it has the proper mode.
4165 But don't forget to store the value into TARGET. */
4166 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4167 emit_move_insn (target, temp);
	    }
	  else
4170 	    convert_move (target, temp, unsignedp);
4173 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4175 /* Handle copying a string constant into an array. The string
4176 constant may be shorter than the array. So copy just the string's
4177 actual length, and clear the rest. First get the size of the data
4178 type of the string, which is actually the size of the target. */
4179 rtx size = expr_size (exp);
4181 if (GET_CODE (size) == CONST_INT
4182 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4183 emit_block_move (target, temp, size,
			 (call_param_p
4185 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4188 /* Compute the size of the data to copy from the string. */
	      tree copy_size
4190 		= size_binop (MIN_EXPR,
4191 make_tree (sizetype, size),
4192 size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
4194 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (call_param_p
4196 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;
4199 /* Copy that much. */
4200 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4201 TYPE_UNSIGNED (sizetype));
4202 emit_block_move (target, temp, copy_size_rtx,
			       (call_param_p
4204 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4206 /* Figure out how much is left in TARGET that we have to clear.
4207 Do all calculations in ptr_mode. */
4208 if (GET_CODE (copy_size_rtx) == CONST_INT)
4210 size = plus_constant (size, -INTVAL (copy_size_rtx));
4211 target = adjust_address (target, BLKmode,
4212 INTVAL (copy_size_rtx));
4216 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4217 				   copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);
4220 #ifdef POINTERS_EXTEND_UNSIGNED
4221 if (GET_MODE (copy_size_rtx) != Pmode)
4222 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4223 TYPE_UNSIGNED (sizetype));
4226 target = offset_address (target, copy_size_rtx,
4227 highest_pow2_factor (copy_size));
4228 label = gen_label_rtx ();
4229 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4230 GET_MODE (size), 0, label);
4233 if (size != const0_rtx)
4234 		clear_storage (target, size);

	      if (label)
		emit_label (label);
	    }
	}
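  /* Illustration (editorial annotation, not part of the original
     source): for the hypothetical initialization

	 char buf[8] = "hi";

     EXP is a STRING_CST with TREE_STRING_LENGTH == 3 ("hi" plus the
     terminating NUL) while expr_size is 8, so the code above copies
     3 bytes with emit_block_move and clears the remaining 5 with
     clear_storage.  */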
4240 /* Handle calls that return values in multiple non-contiguous locations.
4241 The Irix 6 ABI has examples of this. */
4242 else if (GET_CODE (target) == PARALLEL)
4243 emit_group_load (target, temp, TREE_TYPE (exp),
4244 int_size_in_bytes (TREE_TYPE (exp)));
4245 else if (GET_MODE (temp) == BLKmode)
4246 emit_block_move (target, temp, expr_size (exp),
		     (call_param_p
4248 		      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	{
4251 	  temp = force_operand (temp, target);
	  if (temp != target)
4253 	    emit_move_insn (target, temp);
	}
    }

  return NULL_RTX;
}
4260 /* Examine CTOR to discover:
4261 * how many scalar fields are set to nonzero values,
4262 and place it in *P_NZ_ELTS;
4263 * how many scalar fields are set to non-constant values,
4264 and place it in *P_NC_ELTS; and
4265 * how many scalar fields in total are in CTOR,
4266 and place it in *P_ELT_COUNT. */
4269 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4270 HOST_WIDE_INT *p_nc_elts,
4271 HOST_WIDE_INT *p_elt_count)
4273 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4280 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4282 tree value = TREE_VALUE (list);
4283 tree purpose = TREE_PURPOSE (list);
4287 if (TREE_CODE (purpose) == RANGE_EXPR)
4289 tree lo_index = TREE_OPERAND (purpose, 0);
4290 tree hi_index = TREE_OPERAND (purpose, 1);
4292 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4293 mult = (tree_low_cst (hi_index, 1)
4294 - tree_low_cst (lo_index, 1) + 1);
4297 switch (TREE_CODE (value))
4301 HOST_WIDE_INT nz = 0, nc = 0, count = 0;
4302 categorize_ctor_elements_1 (value, &nz, &nc, &count);
4303 nz_elts += mult * nz;
4304 nc_elts += mult * nc;
4305 elt_count += mult * count;
4311 if (!initializer_zerop (value))
4317 nz_elts += mult * TREE_STRING_LENGTH (value);
4318 elt_count += mult * TREE_STRING_LENGTH (value);
4322 if (!initializer_zerop (TREE_REALPART (value)))
4324 if (!initializer_zerop (TREE_IMAGPART (value)))
4332 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4334 if (!initializer_zerop (TREE_VALUE (v)))
4344 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4350 *p_nz_elts += nz_elts;
4351 *p_nc_elts += nc_elts;
4352 *p_elt_count += elt_count;
4356 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4357 HOST_WIDE_INT *p_nc_elts,
4358 HOST_WIDE_INT *p_elt_count)
{
  *p_nz_elts = 0;
  *p_nc_elts = 0;
  *p_elt_count = 0;
4363 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count);
}
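/* Illustration (editorial annotation, not part of the original
   source): for a hypothetical constructor { 1, 0, 2, x } where X is
   a variable, these routines report *P_NZ_ELTS == 3 (the values 1,
   2 and X), *P_NC_ELTS == 1 (X is not a valid constant initializer)
   and *P_ELT_COUNT == 4 (all four scalars).  */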
4366 /* Count the number of scalars in TYPE.  Return -1 on overflow or
   variable-sized.  */
4370 count_type_elements (tree type)
4372 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4373 switch (TREE_CODE (type))
4377 tree telts = array_type_nelts (type);
4378 if (telts && host_integerp (telts, 1))
4380 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4381 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4384 else if (max / n > m)
4392 HOST_WIDE_INT n = 0, t;
4395 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4396 if (TREE_CODE (f) == FIELD_DECL)
4398 t = count_type_elements (TREE_TYPE (f));
4408 case QUAL_UNION_TYPE:
4410 /* Ho hum. How in the world do we guess here? Clearly it isn't
4411 right to count the fields. Guess based on the number of words. */
4412 HOST_WIDE_INT n = int_size_in_bytes (type);
	if (n < 0)
	  return -1;
4415 	return n / UNITS_PER_WORD;
      }
    case VECTOR_TYPE:
4422       return TYPE_VECTOR_SUBPARTS (type);
4431 case REFERENCE_TYPE:
      return 1;

    default:
      gcc_unreachable ();
    }
}
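/* Illustration (editorial annotation, not part of the original
   source): a hypothetical type

	struct { int a; short b[3]; }

   contains 1 + 3 == 4 scalars, so count_type_elements returns 4,
   while an array with variable bounds makes it return -1.  */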
4444 /* Return 1 if EXP contains mostly (3/4) zeros. */
4447 mostly_zeros_p (tree exp)
4449 if (TREE_CODE (exp) == CONSTRUCTOR)
4452 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4454 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count);
4455 elts = count_type_elements (TREE_TYPE (exp));
4457 return nz_elts < elts / 4;
    }

4460 return initializer_zerop (exp);
}
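/* Illustration (editorial annotation, not part of the original
   source): for the hypothetical initializer

	int v[8] = { 0, 0, 0, 0, 0, 0, 0, 5 };

   nz_elts is 1 and elts is 8; since 1 < 8 / 4 the constructor counts
   as mostly zeros, so store_constructor can clear the whole array
   and then store only the one nonzero element.  */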
4463 /* Helper function for store_constructor.
4464 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4465 TYPE is the type of the CONSTRUCTOR, not the element type.
4466 CLEARED is as for store_constructor.
4467 ALIAS_SET is the alias set to use for any stores.
4469 This provides a recursive shortcut back to store_constructor when it isn't
4470 necessary to go through store_field. This is so that we can pass through
4471 the cleared field to let store_constructor know that we may not have to
4472 clear a substructure if the outer structure has already been cleared. */
4475 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4476 HOST_WIDE_INT bitpos, enum machine_mode mode,
4477 tree exp, tree type, int cleared, int alias_set)
4479 if (TREE_CODE (exp) == CONSTRUCTOR
4480 /* We can only call store_constructor recursively if the size and
4481 bit position are on a byte boundary. */
4482 && bitpos % BITS_PER_UNIT == 0
4483 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4484 /* If we have a nonzero bitpos for a register target, then we just
4485 let store_field do the bitfield handling. This is unlikely to
4486 generate unnecessary clear instructions anyways. */
4487 && (bitpos == 0 || MEM_P (target)))
4491 = adjust_address (target,
4492 GET_MODE (target) == BLKmode
4494 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4495 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4498 /* Update the alias set, if required. */
4499 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4500 && MEM_ALIAS_SET (target) != 0)
4502 target = copy_rtx (target);
4503 set_mem_alias_set (target, alias_set);
4506 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4509 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4512 /* Store the value of constructor EXP into the rtx TARGET.
4513 TARGET is either a REG or a MEM; we know it cannot conflict, since
4514 safe_from_p has been called.
4515 CLEARED is true if TARGET is known to have been zero'd.
4516 SIZE is the number of bytes of TARGET we are allowed to modify: this
4517 may not be the same as the size of EXP if we are assigning to a field
4518 which has been packed to exclude padding bits. */
4521 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4523 tree type = TREE_TYPE (exp);
4524 #ifdef WORD_REGISTER_OPERATIONS
4525 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
4528 switch (TREE_CODE (type))
4532 case QUAL_UNION_TYPE:
4536 /* If size is zero or the target is already cleared, do nothing. */
4537 if (size == 0 || cleared)
	cleared = 1;
4539 /* We either clear the aggregate or indicate the value is dead. */
4540 else if ((TREE_CODE (type) == UNION_TYPE
4541 || TREE_CODE (type) == QUAL_UNION_TYPE)
4542 && ! CONSTRUCTOR_ELTS (exp))
4543 /* If the constructor is empty, clear the union. */
	{
4545 	  clear_storage (target, expr_size (exp));
	  cleared = 1;
	}
4549 /* If we are building a static constructor into a register,
4550 set the initial value as zero so we can fold the value into
4551 a constant. But if more than one register is involved,
4552 this probably loses. */
4553 else if (REG_P (target) && TREE_STATIC (exp)
4554 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
4556 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}
4560 /* If the constructor has fewer fields than the structure or
4561 if we are initializing the structure to mostly zeros, clear
4562 the whole structure first. Don't do this if TARGET is a
4563 register whose mode size isn't equal to SIZE since
4564 clear_storage can't handle this case. */
      else if (size > 0
4566 	       && ((list_length (CONSTRUCTOR_ELTS (exp))
4567 		    != fields_length (type))
4568 		   || mostly_zeros_p (exp))
	       && (!REG_P (target)
4570 		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
4573 	  clear_storage (target, GEN_INT (size));
	  cleared = 1;
	}
4578 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4580 /* Store each element of the constructor into the
4581 corresponding field of TARGET. */
4583 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4585 tree field = TREE_PURPOSE (elt);
4586 tree value = TREE_VALUE (elt);
4587 enum machine_mode mode;
4588 HOST_WIDE_INT bitsize;
4589 HOST_WIDE_INT bitpos = 0;
	    tree offset;
4591 	    rtx to_rtx = target;
4593 /* Just ignore missing fields. We cleared the whole
4594 structure, above, if any fields are missing. */
	    if (field == 0)
	      continue;

4598 	    if (cleared && initializer_zerop (value))
	      continue;
4601 if (host_integerp (DECL_SIZE (field), 1))
4602 	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
	    else
	      bitsize = -1;
4606 mode = DECL_MODE (field);
4607 	    if (DECL_BIT_FIELD (field))
	      mode = VOIDmode;
4610 offset = DECL_FIELD_OFFSET (field);
4611 if (host_integerp (offset, 0)
4612 && host_integerp (bit_position (field), 0))
	      {
4614 		bitpos = int_bit_position (field);
		offset = 0;
	      }
	    else
4618 	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
	    if (offset)
	      {
		rtx offset_rtx;

		offset
4625 		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4626 						    make_tree (TREE_TYPE (exp),
							       target));
4629 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4630 gcc_assert (MEM_P (to_rtx));
4632 #ifdef POINTERS_EXTEND_UNSIGNED
4633 if (GET_MODE (offset_rtx) != Pmode)
4634 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4636 if (GET_MODE (offset_rtx) != ptr_mode)
4637 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4640 to_rtx = offset_address (to_rtx, offset_rtx,
4641 					 highest_pow2_factor (offset));
	      }
4644 #ifdef WORD_REGISTER_OPERATIONS
4645 /* If this initializes a field that is smaller than a
4646 word, at the start of a word, try to widen it to a full
4647 word. This special case allows us to output C++ member
4648 	       function initializations in a form that the optimizers
	       can understand.  */
	    if (REG_P (target)
4651 		&& bitsize < BITS_PER_WORD
4652 && bitpos % BITS_PER_WORD == 0
4653 && GET_MODE_CLASS (mode) == MODE_INT
4654 && TREE_CODE (value) == INTEGER_CST
4656 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4658 tree type = TREE_TYPE (value);
4660 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4662 type = lang_hooks.types.type_for_size
4663 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4664 value = convert (type, value);
4667 if (BYTES_BIG_ENDIAN)
		  value
4669 		    = fold (build2 (LSHIFT_EXPR, type, value,
4670 build_int_cst (NULL_TREE,
4671 BITS_PER_WORD - bitsize)));
4672 		bitsize = BITS_PER_WORD;
		mode = word_mode;
	      }
#endif
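	    /* Illustration (editorial annotation, not part of the
	       original source): when four adjacent 8-bit fields of a
	       32-bit structure are initialized with constants, each
	       field's INTEGER_CST is widened to a full word here
	       (shifted into place on big-endian targets), so later
	       optimizers can combine the four stores into a single
	       word move.  */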
4677 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4678 && DECL_NONADDRESSABLE_P (field))
4680 to_rtx = copy_rtx (to_rtx);
4681 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4684 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4685 value, type, cleared,
4686 get_alias_set (TREE_TYPE (field)));
4696 tree elttype = TREE_TYPE (type);
4698 HOST_WIDE_INT minelt = 0;
4699 HOST_WIDE_INT maxelt = 0;
4701 domain = TYPE_DOMAIN (type);
4702 const_bounds_p = (TYPE_MIN_VALUE (domain)
4703 && TYPE_MAX_VALUE (domain)
4704 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4705 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4707 /* If we have constant bounds for the range of the type, get them. */
	if (const_bounds_p)
	  {
4710 	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4711 	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	  }
4714 /* If the constructor has fewer elements than the array, clear
4715 	   the whole array first.  Similarly if this is a static
4716 	   constructor of a non-BLKmode object.  */
	if (cleared)
	  need_to_clear = 0;
4719 	else if (REG_P (target) && TREE_STATIC (exp))
	  need_to_clear = 1;
	else
	  {
4723 HOST_WIDE_INT count = 0, zero_count = 0;
4724 need_to_clear = ! const_bounds_p;
4726 /* This loop is a more accurate version of the loop in
4727 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4728 is also needed to check for missing elements. */
4729 for (elt = CONSTRUCTOR_ELTS (exp);
4730 elt != NULL_TREE && ! need_to_clear;
4731 elt = TREE_CHAIN (elt))
4733 tree index = TREE_PURPOSE (elt);
4734 HOST_WIDE_INT this_node_count;
4736 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4738 tree lo_index = TREE_OPERAND (index, 0);
4739 tree hi_index = TREE_OPERAND (index, 1);
4741 if (! host_integerp (lo_index, 1)
4742 		    || ! host_integerp (hi_index, 1))
		  {
		    need_to_clear = 1;
		    break;
		  }
4748 this_node_count = (tree_low_cst (hi_index, 1)
4749 - tree_low_cst (lo_index, 1) + 1);
4752 this_node_count = 1;
4754 count += this_node_count;
4755 if (mostly_zeros_p (TREE_VALUE (elt)))
4756 zero_count += this_node_count;
4759 /* Clear the entire array first if there are any missing
4760 	       elements, or if the incidence of zero elements is >=
	       75%.  */
	    if (! need_to_clear
4763 		&& (count < maxelt - minelt + 1
4764 		    || 4 * zero_count >= 3 * count))
	      need_to_clear = 1;
	  }
4768 if (need_to_clear && size > 0)
	  {
	    if (REG_P (target))
4771 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	    else
4773 	      clear_storage (target, GEN_INT (size));
	    cleared = 1;
	  }
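	/* Illustration (editorial annotation, not part of the
	   original source): for a hypothetical initializer

	       int a[100] = { [3] = 7 };

	   count is 1 while the domain has 100 elements, so the whole
	   array is cleared first and only a[3] is stored
	   explicitly.  */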
4777 if (!cleared && REG_P (target))
4778 /* Inform later passes that the old value is dead. */
4779 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4781 /* Store each element of the constructor into the
4782 corresponding element of TARGET, determined by counting the
4784 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	     elt;
4786 	     elt = TREE_CHAIN (elt), i++)
4788 enum machine_mode mode;
4789 HOST_WIDE_INT bitsize;
4790 HOST_WIDE_INT bitpos;
4792 tree value = TREE_VALUE (elt);
4793 tree index = TREE_PURPOSE (elt);
4794 rtx xtarget = target;
4796 if (cleared && initializer_zerop (value))
4799 unsignedp = TYPE_UNSIGNED (elttype);
4800 mode = TYPE_MODE (elttype);
4801 if (mode == BLKmode)
4802 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4803 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4806 bitsize = GET_MODE_BITSIZE (mode);
4808 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4810 tree lo_index = TREE_OPERAND (index, 0);
4811 tree hi_index = TREE_OPERAND (index, 1);
4812 rtx index_r, pos_rtx;
4813 HOST_WIDE_INT lo, hi, count;
4816 /* If the range is constant and "small", unroll the loop. */
	      if (const_bounds_p
4818 		  && host_integerp (lo_index, 0)
4819 && host_integerp (hi_index, 0)
4820 && (lo = tree_low_cst (lo_index, 0),
4821 hi = tree_low_cst (hi_index, 0),
4822 count = hi - lo + 1,
4825 || (host_integerp (TYPE_SIZE (elttype), 1)
4826 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4829 lo -= minelt; hi -= minelt;
4830 for (; lo <= hi; lo++)
4832 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
		      if (MEM_P (target)
4835 			  && !MEM_KEEP_ALIAS_SET_P (target)
4836 && TREE_CODE (type) == ARRAY_TYPE
4837 && TYPE_NONALIASED_COMPONENT (type))
4839 target = copy_rtx (target);
4840 MEM_KEEP_ALIAS_SET_P (target) = 1;
4843 store_constructor_field
4844 (target, bitsize, bitpos, mode, value, type, cleared,
4845 		 get_alias_set (elttype));
		    }
		}
	      else
		{
4850 rtx loop_start = gen_label_rtx ();
4851 rtx loop_end = gen_label_rtx ();
4854 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4855 unsignedp = TYPE_UNSIGNED (domain);
4857 index = build_decl (VAR_DECL, NULL_TREE, domain);
		  index_r
4860 		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));
4862 SET_DECL_RTL (index, index_r);
4863 store_expr (lo_index, index_r, 0);
4865 /* Build the head of the loop. */
4866 do_pending_stack_adjust ();
4867 emit_label (loop_start);
4869 /* Assign value to element index. */
		  position
4871 		    = convert (ssizetype,
4872 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4873 index, TYPE_MIN_VALUE (domain))));
4874 position = size_binop (MULT_EXPR, position,
					 convert (ssizetype,
4876 						  TYPE_SIZE_UNIT (elttype)));
4878 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4879 xtarget = offset_address (target, pos_rtx,
4880 highest_pow2_factor (position));
4881 xtarget = adjust_address (xtarget, mode, 0);
4882 if (TREE_CODE (value) == CONSTRUCTOR)
4883 store_constructor (value, xtarget, cleared,
4884 bitsize / BITS_PER_UNIT);
4886 store_expr (value, xtarget, 0);
4888 /* Generate a conditional jump to exit the loop. */
4889 exit_cond = build2 (LT_EXPR, integer_type_node,
4891 jumpif (exit_cond, loop_end);
4893 /* Update the loop counter, and jump to the head of
4895 expand_assignment (index,
4896 build2 (PLUS_EXPR, TREE_TYPE (index),
4897 index, integer_one_node));
4899 emit_jump (loop_start);
4901 /* Build the end of the loop. */
4902 		  emit_label (loop_end);
		}
	    }
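	  /* Illustration (editorial annotation, not part of the
	     original source): a GNU C range designator such as

		 int a[100] = { [10 ... 89] = 7 };

	     reaches this path when the range is too large to unroll;
	     the code above materializes an index variable initialized
	     from LO_INDEX and emits a real runtime loop that stores
	     the value into each element in turn.  */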
4905 else if ((index != 0 && ! host_integerp (index, 0))
4906 || ! host_integerp (TYPE_SIZE (elttype), 1))
4911 index = ssize_int (1);
4914 index = fold_convert (ssizetype,
4915 fold (build2 (MINUS_EXPR,
4918 TYPE_MIN_VALUE (domain))));
4920 position = size_binop (MULT_EXPR, index,
				     convert (ssizetype,
4922 					      TYPE_SIZE_UNIT (elttype)));
4923 xtarget = offset_address (target,
4924 expand_expr (position, 0, VOIDmode, 0),
4925 highest_pow2_factor (position));
4926 xtarget = adjust_address (xtarget, mode, 0);
4927 store_expr (value, xtarget, 0);
	      if (index != 0)
4932 		bitpos = ((tree_low_cst (index, 0) - minelt)
4933 * tree_low_cst (TYPE_SIZE (elttype), 1));
	      else
4935 		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4937 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4938 && TREE_CODE (type) == ARRAY_TYPE
4939 && TYPE_NONALIASED_COMPONENT (type))
4941 target = copy_rtx (target);
4942 MEM_KEEP_ALIAS_SET_P (target) = 1;
4944 store_constructor_field (target, bitsize, bitpos, mode, value,
4945 type, cleared, get_alias_set (elttype));
4957 tree elttype = TREE_TYPE (type);
4958 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4959 enum machine_mode eltmode = TYPE_MODE (elttype);
4960 HOST_WIDE_INT bitsize;
4961 HOST_WIDE_INT bitpos;
4965 gcc_assert (eltmode != BLKmode);
4967 n_elts = TYPE_VECTOR_SUBPARTS (type);
4968 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4970 enum machine_mode mode = GET_MODE (target);
4972 icode = (int) vec_init_optab->handlers[mode].insn_code;
4973 if (icode != CODE_FOR_nothing)
		/* Allocate room for one rtx per vector element, not
		   one byte per element.  */
4977 		vector = alloca (n_elts * sizeof (rtx));
4978 for (i = 0; i < n_elts; i++)
4979 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4983 /* If the constructor has fewer elements than the vector,
4984 	     clear the whole array first.  Similarly if this is a static
4985 constructor of a non-BLKmode object. */
4988 else if (REG_P (target) && TREE_STATIC (exp))
4992 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4994 for (elt = CONSTRUCTOR_ELTS (exp);
4996 elt = TREE_CHAIN (elt))
4998 int n_elts_here = tree_low_cst
4999 (int_const_binop (TRUNC_DIV_EXPR,
5000 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
5001 TYPE_SIZE (elttype), 0), 1);
5003 count += n_elts_here;
5004 if (mostly_zeros_p (TREE_VALUE (elt)))
5005 zero_count += n_elts_here;
5008 /* Clear the entire vector first if there are any missing elements,
5009 or if the incidence of zero elements is >= 75%. */
5010 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5013 if (need_to_clear && size > 0 && !vector)
5016 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5018 clear_storage (target, GEN_INT (size));
5022 if (!cleared && REG_P (target))
5023 /* Inform later passes that the old value is dead. */
5024 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5026 /* Store each element of the constructor into the corresponding
5027 element of TARGET, determined by counting the elements. */
5028 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	       elt;
5030 	       elt = TREE_CHAIN (elt), i += bitsize / elt_size)
5032 tree value = TREE_VALUE (elt);
5033 tree index = TREE_PURPOSE (elt);
5034 HOST_WIDE_INT eltpos;
5036 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5037 if (cleared && initializer_zerop (value))
5041 eltpos = tree_low_cst (index, 1);
5047 /* Vector CONSTRUCTORs should only be built from smaller
5048 vectors in the case of BLKmode vectors. */
5049 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5050 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5054 enum machine_mode value_mode =
5055 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5056 ? TYPE_MODE (TREE_TYPE (value))
5058 bitpos = eltpos * elt_size;
5059 store_constructor_field (target, bitsize, bitpos,
5060 value_mode, value, type,
5061 cleared, get_alias_set (elttype));
5066 emit_insn (GEN_FCN (icode)
		       (target,
5068 			gen_rtx_PARALLEL (GET_MODE (target),
5069 					  gen_rtvec_v (n_elts, vector))));
	  break;
	}

      default:
	gcc_unreachable ();
      }
}
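/* Illustration (editorial annotation, not part of the original
   source): for a hypothetical GCC vector initializer

	typedef int v4si __attribute__ ((vector_size (16)));
	v4si v = { 1, 2, 3, 4 };

   with V in a vector register, the four elements are collected into
   VECTOR above and emitted as one vec_init insn whose source is a
   PARALLEL of the four constants.  */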
5078 /* Store the value of EXP (an expression tree)
5079 into a subfield of TARGET which has mode MODE and occupies
5080 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5081 If MODE is VOIDmode, it means that we are storing into a bit-field.
5083 Always return const0_rtx unless we have something particular to
   return.
5086 TYPE is the type of the underlying object,
5088 ALIAS_SET is the alias set for the destination. This value will
5089 (in general) be different from that for TARGET, since TARGET is a
5090 reference to the containing structure. */
5093 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5094 enum machine_mode mode, tree exp, tree type, int alias_set)
5096 HOST_WIDE_INT width_mask = 0;
5098 if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

5101 /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
5104 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5105 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5106 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5108 /* If we are storing into an unaligned field of an aligned union that is
5109 in a register, we may have the mode of TARGET being an integer mode but
5110 MODE == BLKmode. In that case, get an aligned object whose size and
5111 alignment are the same as TARGET and store TARGET into it (we can avoid
5112 the store if the field being stored is the entire width of TARGET). Then
5113 call ourselves recursively to store the field into a BLKmode version of
5114 that object. Finally, load from the object into TARGET. This is not
5115 very efficient in general, but should only be slightly more expensive
5116 than the otherwise-required unaligned accesses. Perhaps this can be
5117 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5118 twice, once with emit_move_insn and once via store_field. */
  if (mode == BLKmode
5121       && (REG_P (target) || GET_CODE (target) == SUBREG))
5123 rtx object = assign_temp (type, 0, 1, 1);
5124 rtx blk_object = adjust_address (object, BLKmode, 0);
5126 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5127 emit_move_insn (object, target);
5129 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5131 emit_move_insn (target, object);
5133 /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }
5137 if (GET_CODE (target) == CONCAT)
5139 /* We're storing into a struct containing a single __complex. */
5141 gcc_assert (!bitpos);
5142 return store_expr (exp, target, 0);
5145 /* If the structure is in a register or if the component
5146 is a bit field, we cannot use addressing to access it.
5147 Use bit-field techniques or SUBREG to store in it. */
5149 if (mode == VOIDmode
5150 || (mode != BLKmode && ! direct_store[(int) mode]
5151 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5152 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || REG_P (target)
5154       || GET_CODE (target) == SUBREG
5155 /* If the field isn't aligned enough to store as an ordinary memref,
5156 store it as a bit field. */
      || (mode != BLKmode
5158 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5159 || bitpos % GET_MODE_ALIGNMENT (mode))
5160 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5161 || (bitpos % BITS_PER_UNIT != 0)))
5162 /* If the RHS and field are a constant size and the size of the
5163 	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
5166 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5167 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5169 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5171 /* If BITSIZE is narrower than the size of the type of EXP
5172 we will be narrowing TEMP. Normally, what's wanted are the
5173 	 low-order bits.  However, if EXP's type is a record and this is a
5174 big-endian machine, we want the upper BITSIZE bits. */
5175 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5176 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5177 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5178 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5179 			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);
5183 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
5185 if (mode != VOIDmode && mode != BLKmode
5186 && mode != TYPE_MODE (TREE_TYPE (exp)))
5187 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5189 /* If the modes of TARGET and TEMP are both BLKmode, both
5190 must be in memory and BITPOS must be aligned on a byte
5191 boundary. If so, we simply do a block copy. */
5192 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5194 gcc_assert (MEM_P (target) && MEM_P (temp)
5195 && !(bitpos % BITS_PER_UNIT));
5197 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5198 emit_block_move (target, temp,
5199 			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);
5206 /* Store the value in the bitfield. */
5207 store_bit_field (target, bitsize, bitpos, mode, temp);
5213 /* Now build a reference to just the desired component. */
5214 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5216 if (to_rtx == target)
5217 to_rtx = copy_rtx (to_rtx);
5219 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5220 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5221 set_mem_alias_set (to_rtx, alias_set);
5223 return store_expr (exp, to_rtx, 0);
    }
}
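/* Illustration (editorial annotation, not part of the original
   source): a store to a hypothetical bit-field

	struct { unsigned f : 5; } s;  ...  s.f = x;

   arrives here with BITSIZE == 5 and MODE == VOIDmode, so the value
   is expanded into a register and store_bit_field does the masking
   and shifting; a store to an ordinarily aligned int member would
   instead take the adjust_address/store_expr path just above.  */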
5227 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5228 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5229 codes and find the ultimate containing object, which we return.
5231 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5232 bit position, and *PUNSIGNEDP to the signedness of the field.
5233 If the position of the field is variable, we store a tree
5234 giving the variable offset (in units) in *POFFSET.
5235 This offset is in addition to the bit position.
5236 If the position is not variable, we store 0 in *POFFSET.
5238 If any of the extraction expressions is volatile,
5239 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5241 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5242 is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.
5245 If the field describes a variable-sized object, *PMODE is set to
5246 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5247 this case, but the address of the object can be found. */
5250 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5251 HOST_WIDE_INT *pbitpos, tree *poffset,
5252 enum machine_mode *pmode, int *punsignedp,
5256 enum machine_mode mode = VOIDmode;
5257 tree offset = size_zero_node;
5258 tree bit_offset = bitsize_zero_node;
5261 /* First get the mode, signedness, and size. We do this from just the
5262 outermost expression. */
5263 if (TREE_CODE (exp) == COMPONENT_REF)
5265 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5266 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5267 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5269 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5271 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5273 size_tree = TREE_OPERAND (exp, 1);
5274 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5278 mode = TYPE_MODE (TREE_TYPE (exp));
5279 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5281 if (mode == BLKmode)
5282 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5284 *pbitsize = GET_MODE_BITSIZE (mode);
5289 if (! host_integerp (size_tree, 1))
5290 mode = BLKmode, *pbitsize = -1;
  else
5292     *pbitsize = tree_low_cst (size_tree, 1);
5295 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5296 and find the ultimate containing object. */
5299 switch (TREE_CODE (exp))
	{
	case BIT_FIELD_REF:
5302 	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5303 TREE_OPERAND (exp, 2));
	  break;

	case COMPONENT_REF:
	  {
5308 	    tree field = TREE_OPERAND (exp, 1);
5309 tree this_offset = component_ref_field_offset (exp);
5311 /* If this field hasn't been filled in yet, don't go past it.
5312 This should only happen when folding expressions made during
5313 type construction. */
5314 	    if (this_offset == 0)
	      break;
5317 offset = size_binop (PLUS_EXPR, offset, this_offset);
5318 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5319 DECL_FIELD_BIT_OFFSET (field));
5321 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
	  }
	  break;

	case ARRAY_REF:
5326 	case ARRAY_RANGE_REF:
5328 tree index = TREE_OPERAND (exp, 1);
5329 tree low_bound = array_ref_low_bound (exp);
5330 tree unit_size = array_ref_element_size (exp);
5332 /* We assume all arrays have sizes that are a multiple of a byte.
5333 First subtract the lower bound, if any, in the type of the
5334 index, then convert to sizetype and multiply by the size of
5335 the array element. */
5336 if (! integer_zerop (low_bound))
5337 	      index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
				    index, low_bound));
5340 offset = size_binop (PLUS_EXPR, offset,
5341 size_binop (MULT_EXPR,
5342 					     convert (sizetype, index),
					     unit_size));
	  }
	  break;
	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
5351 	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
5352 				   bitsize_int (*pbitsize));
	  break;
5355 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5356 conversions that don't change the mode, and all view conversions
5357 except those that need to "step up" the alignment. */
5359 case VIEW_CONVERT_EXPR:
5360 if ((TYPE_ALIGN (TREE_TYPE (exp))
5361 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5363 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5364 < BIGGEST_ALIGNMENT)
5365 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5366 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5374 /* If any reference in the chain is volatile, the effect is volatile. */
5375 if (TREE_THIS_VOLATILE (exp))
5378 exp = TREE_OPERAND (exp, 0);
5382 /* If OFFSET is constant, see if we can return the whole thing as a
5383 constant bit position. Otherwise, split it up. */
5384 if (host_integerp (offset, 0)
5385 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5387 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5388 && host_integerp (tem, 0))
5389 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
5391     *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  return exp;
}
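/* Illustration (editorial annotation, not part of the original
   source): for a hypothetical reference S.B[2].C, where array B
   starts at byte 4 of S, each element is 16 bytes and C sits 2 bytes
   into its element, the loop accumulates 4 + 2*16 + 2 == 38 bytes,
   so *PBITPOS comes back as 304 and *POFFSET as 0; were the index a
   variable, the element offset would instead come back as a tree in
   *POFFSET.  */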
5397 /* Return a tree of sizetype representing the size, in bytes, of the element
5398 of EXP, an ARRAY_REF. */
5401 array_ref_element_size (tree exp)
5403 tree aligned_size = TREE_OPERAND (exp, 3);
5404 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5406 /* If a size was specified in the ARRAY_REF, it's the size measured
5407 in alignment units of the element type. So multiply by that value. */
  if (aligned_size)
    {
5410       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5411 sizetype from another type of the same width and signedness. */
5412 if (TREE_TYPE (aligned_size) != sizetype)
5413 aligned_size = fold_convert (sizetype, aligned_size);
5414 return size_binop (MULT_EXPR, aligned_size,
5415 			 size_int (TYPE_ALIGN_UNIT (elmt_type)));
    }
5418 /* Otherwise, take the size from that of the element type. Substitute
5419 any PLACEHOLDER_EXPR that we have. */
5421 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5424 /* Return a tree representing the lower bound of the array mentioned in
5425 EXP, an ARRAY_REF. */
5428 array_ref_low_bound (tree exp)
5430 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5432 /* If a lower bound is specified in EXP, use it. */
5433 if (TREE_OPERAND (exp, 2))
5434 return TREE_OPERAND (exp, 2);
5436 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5437 substituting for a PLACEHOLDER_EXPR as needed. */
5438 if (domain_type && TYPE_MIN_VALUE (domain_type))
5439 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5441 /* Otherwise, return a zero of the appropriate type. */
5442 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5445 /* Return a tree representing the upper bound of the array mentioned in
5446 EXP, an ARRAY_REF. */
5449 array_ref_up_bound (tree exp)
5451 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5453 /* If there is a domain type and it has an upper bound, use it, substituting
5454 for a PLACEHOLDER_EXPR as needed. */
5455 if (domain_type && TYPE_MAX_VALUE (domain_type))
5456 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5458 /* Otherwise fail.  */
  return NULL_TREE;
}
5462 /* Return a tree representing the offset, in bytes, of the field referenced
5463 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5466 component_ref_field_offset (tree exp)
5468 tree aligned_offset = TREE_OPERAND (exp, 2);
5469 tree field = TREE_OPERAND (exp, 1);
5471 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5472 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
    {
5476 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5477 sizetype from another type of the same width and signedness. */
5478 if (TREE_TYPE (aligned_offset) != sizetype)
5479 aligned_offset = fold_convert (sizetype, aligned_offset);
5480 return size_binop (MULT_EXPR, aligned_offset,
5481 			 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
    }
5484 /* Otherwise, take the offset from that of the field. Substitute
5485 any PLACEHOLDER_EXPR that we have. */
5487 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5490 /* Return 1 if T is an expression that get_inner_reference handles. */
5493 handled_component_p (tree t)
5495 switch (TREE_CODE (t))
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
5500     case ARRAY_RANGE_REF:
5501     case VIEW_CONVERT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      return 1;

    default:
      return 0;
    }
}
5511 /* Given an rtx VALUE that may contain additions and multiplications, return
5512 an equivalent value that just refers to a register, memory, or constant.
5513 This is done by generating instructions to perform the arithmetic and
5514 returning a pseudo-register containing the value.
5516 The returned value may be a REG, SUBREG, MEM or constant. */
5519 force_operand (rtx value, rtx target)
5522 /* Use subtarget as the target for operand 0 of a binary operation. */
5523 rtx subtarget = get_subtarget (target);
5524 enum rtx_code code = GET_CODE (value);
5526 /* Check for subreg applied to an expression produced by loop optimizer. */
  if (code == SUBREG
5528       && !REG_P (SUBREG_REG (value))
5529 && !MEM_P (SUBREG_REG (value)))
5531 value = simplify_gen_subreg (GET_MODE (value),
5532 force_reg (GET_MODE (SUBREG_REG (value)),
5533 force_operand (SUBREG_REG (value),
						     NULL_RTX)),
5535 				 GET_MODE (SUBREG_REG (value)),
5536 SUBREG_BYTE (value));
5537 code = GET_CODE (value);
5540 /* Check for a PIC address load. */
5541 if ((code == PLUS || code == MINUS)
5542 && XEXP (value, 0) == pic_offset_table_rtx
5543 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5544 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5545 || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
5548 	subtarget = gen_reg_rtx (GET_MODE (value));
5549       emit_move_insn (subtarget, value);
      return subtarget;
    }
5553 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
5556 	target = gen_reg_rtx (GET_MODE (value));
5557 convert_move (target, force_operand (XEXP (value, 0), NULL),
5558 		    code == ZERO_EXTEND);
      return target;
    }
5562 if (ARITHMETIC_P (value))
5564 op2 = XEXP (value, 1);
5565       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
	subtarget = 0;
5567 if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
5570 	  op2 = negate_rtx (GET_MODE (value), op2);
	}
5573 /* Check for an addition with OP2 a constant integer and our first
5574 operand a PLUS of a virtual register and something else. In that
5575 case, we want to emit the sum of the virtual register and the
5576 constant first and then add the other value. This allows virtual
5577 register instantiation to simply modify the constant rather than
5578 creating another one around this addition. */
5579 if (code == PLUS && GET_CODE (op2) == CONST_INT
5580 && GET_CODE (XEXP (value, 0)) == PLUS
5581 && REG_P (XEXP (XEXP (value, 0), 0))
5582 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5583 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5585 rtx temp = expand_simple_binop (GET_MODE (value), code,
5586 XEXP (XEXP (value, 0), 0), op2,
5587 subtarget, 0, OPTAB_LIB_WIDEN);
5588 return expand_simple_binop (GET_MODE (value), code, temp,
5589 force_operand (XEXP (XEXP (value,
5591 				      target, 0, OPTAB_LIB_WIDEN);
	}
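      /* Illustration (editorial annotation, not part of the original
	 source): for a value like

	     (plus (plus (reg virtual-stack-vars) (reg R)) (const_int 4))

	 the special case above first emits virtual-stack-vars + 4,
	 which virtual register instantiation can later rewrite as a
	 plain frame-pointer offset, and only then adds R, so no extra
	 constant ends up in a separate register.  */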
5594 op1 = force_operand (XEXP (value, 0), subtarget);
5595 op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
5599 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
5601 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5602 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5603 target, 1, OPTAB_LIB_WIDEN);
5605 return expand_divmod (0,
5606 FLOAT_MODE_P (GET_MODE (value))
5607 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5608 GET_MODE (value), op1, op2, target, 0);
5611 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5615 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5619 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5623 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5624 target, 0, OPTAB_LIB_WIDEN);
5627 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5628 target, 1, OPTAB_LIB_WIDEN);
5631 if (UNARY_P (value))
5633 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5634 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5637 #ifdef INSN_SCHEDULING
5638 /* On machines that have insn scheduling, we want all memory reference to be
5639 explicit, so we need to deal with such paradoxical SUBREGs. */
5640 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5641 && (GET_MODE_SIZE (GET_MODE (value))
5642 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
5644       = simplify_gen_subreg (GET_MODE (value),
5645 force_reg (GET_MODE (SUBREG_REG (value)),
5646 force_operand (SUBREG_REG (value),
					    NULL_RTX)),
5648 			     GET_MODE (SUBREG_REG (value)),
5649 			     SUBREG_BYTE (value));
#endif

  return value;
}
5655 /* Subroutine of expand_expr: return nonzero iff there is no way that
5656 EXP can reference X, which is being modified. TOP_P is nonzero if this
5657 call is going to be used to determine whether we need a temporary
5658 for EXP, as opposed to a recursive call to this function.
5660 It is always safe for this routine to return zero since it merely
5661 searches for optimization opportunities. */
5664 safe_from_p (rtx x, tree exp, int top_p)
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
5670       /* If EXP has varying size, we MUST use a target since we currently
5671 have no way of allocating temporaries of variable size
5672 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5673 So we assume here that something at a higher level has prevented a
5674 clash. This is somewhat bogus, but the best we can do. Only
5675 do this when X is BLKmode and when we are at the top level. */
5676 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5677 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5678 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5679 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5680 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5682 && GET_MODE (x) == BLKmode)
5683 /* If X is in the outgoing argument area, it is always safe. */
      || (MEM_P (x)
5685 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
5686 	      || (GET_CODE (XEXP (x, 0)) == PLUS
5687 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;
5690 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5691 find the underlying pseudo. */
5692 if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
5695       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }
5699 /* Now look at our tree code and possibly recurse. */
5700 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5702 case tcc_declaration:
5703       exp_rtl = DECL_RTL_IF_SET (exp);
      break;
5709 case tcc_exceptional:
5710 if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
5714 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
5716 	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
5719 	      if (TREE_CODE (exp) != TREE_LIST)
5720 		return safe_from_p (x, exp, 0);
	    }
	}
5723 else if (TREE_CODE (exp) == ERROR_MARK)
5724 return 1; /* An already-visited SAVE_EXPR? */
5729 /* The only case we look at here is the DECL_INITIAL inside a
5731 return (TREE_CODE (exp) != DECL_EXPR
5732 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5733 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5734 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5737 case tcc_comparison:
5738       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;

      /* Fall through.  */

    case tcc_unary:
5743 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5745 case tcc_expression:
5747 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5748 the expression. If it is set, we conflict iff we are that rtx or
5749 both are in memory. Otherwise, we check all operands of the
5750 expression recursively. */
5752 switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
5755 	  /* If the operand is static or we are static, we can't conflict.
5756 Likewise if we don't conflict with the operand at all. */
5757 if (staticp (TREE_OPERAND (exp, 0))
5758 || TREE_STATIC (exp)
5759 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;
5762 /* Otherwise, the only way this can conflict is if we are taking
5763 	     the address of a DECL whose address is part of X, which is
	     very rare.  */
5765 	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
5768 	      if (!DECL_RTL_SET_P (exp)
5769 || !MEM_P (DECL_RTL (exp)))
		return 0;
	      else
5772 		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;
5776 case MISALIGNED_INDIRECT_REF:
5777 case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  if (MEM_P (x)
5780 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5781 					get_alias_set (exp)))
	    return 0;
	  break;
5786 	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
5788 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || MEM_P (x))
	    return 0;
	  break;
5793 case WITH_CLEANUP_EXPR:
5794 case CLEANUP_POINT_EXPR:
5795 /* Lowered by gimplify.c. */
	  gcc_unreachable ();

	case SAVE_EXPR:
5799 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
	default:
	  break;
	}

5805       /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;
5809 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5810 for (i = 0; i < nops; i++)
5811 if (TREE_OPERAND (exp, i) != 0
5812 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;
5815 /* If this is a language-specific tree code, it may require
5816 special handling. */
5817 if ((unsigned int) TREE_CODE (exp)
5818 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5819 	  && !lang_hooks.safe_from_p (x, exp))
	return 0;
      break;
    case tcc_type:
5824       /* Should never get a type here.  */
      gcc_unreachable ();
    }
5828 /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
5832       if (GET_CODE (exp_rtl) == SUBREG)
	{
5834 	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (REG_P (exp_rtl)
5836 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}
5840 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5841 are memory and they conflict. */
5842 return ! (rtx_equal_p (x, exp_rtl)
5843 || (MEM_P (x) && MEM_P (exp_rtl)
5844 && true_dependence (exp_rtl, VOIDmode, x,
5845 rtx_addr_varies_p)));
    }

5848 /* If we reach here, it is safe.  */
  return 1;
}
5853 /* Return the highest power of two that EXP is known to be a multiple of.
5854 This is used in updating alignment of MEMs in array references. */
5856 static unsigned HOST_WIDE_INT
5857 highest_pow2_factor (tree exp)
5859 unsigned HOST_WIDE_INT c0, c1;
5861 switch (TREE_CODE (exp))
    {
    case INTEGER_CST:
5864       /* We can find the lowest bit that's a one.  If the low
5865 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5866 We need to handle this case since we can find it in a COND_EXPR,
5867 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5868 	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
5870 if (TREE_CONSTANT_OVERFLOW (exp))
5871 return BIGGEST_ALIGNMENT;
5874 /* Note: tree_low_cst is intentionally not used here,
5875 we don't care about the upper bits. */
5876 c0 = TREE_INT_CST_LOW (exp);
      c0 &= -c0;
5878       return c0 ? c0 : BIGGEST_ALIGNMENT;
5882 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5883 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5884 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5885 return MIN (c0, c1);
    case MULT_EXPR:
5888       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5889       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;
5892 case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
5894 if (integer_pow2p (TREE_OPERAND (exp, 1))
5895 && host_integerp (TREE_OPERAND (exp, 1), 1))
	{
5897 	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5898 	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5899 	  return MAX (1, c0 / c1);
	}
      break;
5903 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5905 return highest_pow2_factor (TREE_OPERAND (exp, 0));
    case COMPOUND_EXPR:
5908       return highest_pow2_factor (TREE_OPERAND (exp, 1));
    case COND_EXPR:
5911       c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5912 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5913       return MIN (c0, c1);

    default:
      break;
    }

  return 1;
}
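/* Illustration (editorial annotation, not part of the original
   source): for a hypothetical index expression I * 12 + 4, the
   INTEGER_CST case extracts the lowest set bit of 12 (4), a plain
   variable contributes 1, so MULT yields 1 * 4 and PLUS yields
   MIN (4, 4): the expression is known to be a multiple of 4 and a
   MEM based on it can be marked 4-byte aligned.  */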
5922 /* Similar, except that the alignment requirements of TARGET are
5923 taken into account. Assume it is at least as aligned as its
5924 type, unless it is a COMPONENT_REF in which case the layout of
5925 the structure gives the alignment. */
5927 static unsigned HOST_WIDE_INT
5928 highest_pow2_factor_for_target (tree target, tree exp)
5930 unsigned HOST_WIDE_INT target_align, factor;
5932 factor = highest_pow2_factor (exp);
5933 if (TREE_CODE (target) == COMPONENT_REF)
5934 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5936 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5937 return MAX (factor, target_align);
5940 /* Expands variable VAR. */
5943 expand_var (tree var)
5945 if (DECL_EXTERNAL (var))
    return;
5948 if (TREE_STATIC (var))
5949 /* If this is an inlined copy of a static local variable,
5950 look up the original decl. */
5951 var = DECL_ORIGIN (var);
5953 if (TREE_STATIC (var)
5954 ? !TREE_ASM_WRITTEN (var)
5955 : !DECL_RTL_SET_P (var))
    {
5957       if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5958 	/* Should be ignored.  */;
5959       else if (lang_hooks.expand_decl (var))
	/* OK.  */;
5961       else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
	expand_decl (var);
5963       else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5964 	rest_of_decl_compilation (var, 0, 0);
      else
5966 	/* No expansion needed.  */
5967 	gcc_assert (TREE_CODE (var) == TYPE_DECL
5968 		    || TREE_CODE (var) == CONST_DECL
5969 		    || TREE_CODE (var) == FUNCTION_DECL
5970 		    || TREE_CODE (var) == LABEL_DECL);
    }
}
5974 /* Subroutine of expand_expr. Expand the two operands of a binary
5975 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5976 The value may be stored in TARGET if TARGET is nonzero. The
5977 MODIFIER argument is as documented by expand_expr. */
5980 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5981 enum expand_modifier modifier)
5983 if (! safe_from_p (target, exp1, 1))
    target = 0;
5985 if (operand_equal_p (exp0, exp1, 0))
5987 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5988 *op1 = copy_rtx (*op0);
    }
  else
    {
5992       /* If we need to preserve evaluation order, copy exp0 into its own
5993 temporary variable so that it can't be clobbered by exp1. */
5994 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5995 exp0 = save_expr (exp0);
5996 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5997       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
6002 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6003 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6006 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6007 enum expand_modifier modifier)
6009 rtx result, subtarget;
  tree inner, offset;
6011   HOST_WIDE_INT bitsize, bitpos;
6012 int volatilep, unsignedp;
6013 enum machine_mode mode1;
6015 /* If we are taking the address of a constant and are at the top level,
6016 we have to use output_constant_def since we can't call force_const_mem
     yet.  */
6018 /* ??? This should be considered a front-end bug. We should not be
6019 generating ADDR_EXPR of something that isn't an LVALUE. The only
6020 exception here is STRING_CST. */
6021 if (TREE_CODE (exp) == CONSTRUCTOR
6022 || CONSTANT_CLASS_P (exp))
6023 return XEXP (output_constant_def (exp, 0), 0);
6025 /* Everything must be something allowed by is_gimple_addressable. */
6026 switch (TREE_CODE (exp))
    {
    case INDIRECT_REF:
6029       /* This case will happen via recursion for &a->b.  */
6030 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
    case CONST_DECL:
6033       /* Recurse and make the output_constant_def clause above handle this.  */
6034       return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
				      tmode, modifier);
    case REALPART_EXPR:
6038       /* The real part of the complex number is always first, therefore
6039 the address is the same as the address of the parent object. */
6042 inner = TREE_OPERAND (exp, 0);
      break;

    case IMAGPART_EXPR:
6046       /* The imaginary part of the complex number is always second.
6047 The expression is therefore always offset by the size of the
6050 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6051 inner = TREE_OPERAND (exp, 0);
      break;

    default:
6055       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
6056 expand_expr, as that can have various side effects; LABEL_DECLs for
6057 example, may not have their DECL_RTL set yet. Assume language
6058 specific tree nodes can be expanded in some interesting way. */
      if (DECL_P (exp)
6060 	  || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
	{
6062 	  result = expand_expr (exp, target, tmode,
6063 modifier == EXPAND_INITIALIZER
6064 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6066 /* If the DECL isn't in memory, then the DECL wasn't properly
6067 marked TREE_ADDRESSABLE, which will be either a front-end
6068 or a tree optimizer bug. */
6069 gcc_assert (GET_CODE (result) == MEM);
6070 result = XEXP (result, 0);
6072 /* ??? Is this needed anymore? */
6073 	  if (DECL_P (exp) && TREE_USED (exp))
6075 assemble_external (exp);
6076 TREE_USED (exp) = 1;
6079 if (modifier != EXPAND_INITIALIZER
6080 && modifier != EXPAND_CONST_ADDRESS)
6081 	    result = force_operand (result, target);
	  return result;
	}
6085 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6086 				   &mode1, &unsignedp, &volatilep);
      break;
    }
6090 /* We must have made progress. */
6091 gcc_assert (inner != exp);
6093 subtarget = offset || bitpos ? NULL_RTX : target;
6094 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
  if (offset)
    {
      rtx tmp;

6100       if (modifier != EXPAND_NORMAL)
6101 result = force_operand (result, NULL);
6102 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6104 result = convert_memory_address (tmode, result);
6105 tmp = convert_memory_address (tmode, tmp);
6107 if (modifier == EXPAND_SUM)
6108 result = gen_rtx_PLUS (tmode, result, tmp);
      else
	{
6111 	  subtarget = bitpos ? NULL_RTX : target;
6112 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6113 1, OPTAB_LIB_WIDEN);
	}
    }

  if (bitpos)
    {
6119       /* Someone beforehand should have rejected taking the address
6120 of such an object. */
6121 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6123 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6124 if (modifier < EXPAND_SUM)
6125 	result = force_operand (result, target);
    }

  return result;
}
6131 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6132 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6135 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6136 enum expand_modifier modifier)
6138 enum machine_mode rmode;
6141 /* Target mode of VOIDmode says "whatever's natural". */
6142 if (tmode == VOIDmode)
6143 tmode = TYPE_MODE (TREE_TYPE (exp));
6145 /* We can get called with some Weird Things if the user does silliness
6146 like "(short) &a". In that case, convert_memory_address won't do
6147 the right thing, so ignore the given target mode. */
6148 if (tmode != Pmode && tmode != ptr_mode)
    tmode = Pmode;

6151   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
				    tmode, modifier);
6154 /* Despite expand_expr claims concerning ignoring TMODE when not
6155 strictly convenient, stuff breaks if we don't honor it. Note
6156 that combined with the above, we only do this for pointer modes. */
6157 rmode = GET_MODE (result);
6158 if (rmode == VOIDmode)
    rmode = tmode;
  if (rmode != tmode)
6161     result = convert_memory_address (tmode, result);

  return result;
}
6167 /* expand_expr: generate code for computing expression EXP.
6168 An rtx for the computed value is returned. The value is never null.
6169 In the case of a void EXP, const0_rtx is returned.
6171 The value may be stored in TARGET if TARGET is nonzero.
6172 TARGET is just a suggestion; callers must assume that
6173 the rtx returned may not be the same as TARGET.
6175 If TARGET is CONST0_RTX, it means that the value will be ignored.
6177 If TMODE is not VOIDmode, it suggests generating the
6178 result in mode TMODE. But this is done only when convenient.
6179 Otherwise, TMODE is ignored and the value generated in its natural mode.
6180 TMODE is just a suggestion; callers must assume that
6181 the rtx returned may not have mode TMODE.
6183 Note that TARGET may have neither TMODE nor MODE. In that case, it
6184 probably will not be used.
6186 If MODIFIER is EXPAND_SUM then when EXP is an addition
6187 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6188 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6189 products as above, or REG or MEM, or constant.
6190 Ordinarily in such cases we would output mul or add instructions
6191 and then return a pseudo reg containing the sum.
6193 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6194 it also marks a label as absolutely required (it can't be dead).
6195 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6196 This is used for outputting expressions used in initializers.
6198 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6199 with a constant address even if that address is not normally legitimate.
6200 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6202 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6203 a call parameter. Such targets require special care as we haven't yet
6204 marked TARGET so that it's safe from being trashed by libcalls. We
6205 don't want to use TARGET for anything but the final result;
6206 intermediate values must go elsewhere. Additionally, calls to
6207 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6209 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6210 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6211 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6212 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
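/* Editor's note: a minimal usage sketch of the contract documented above
   (illustrative only; EXP and TARGET stand for whatever the caller has).

       rtx val;

       ... ordinary evaluation; TARGET is only a hint ...
       val = expand_expr (exp, target, TYPE_MODE (TREE_TYPE (exp)),
                          EXPAND_NORMAL);

       ... evaluation purely for side effects; the value is discarded ...
       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

       ... address arithmetic; the result may be an unreduced PLUS/MULT
       nest that the caller must legitimize ...
       val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_SUM);
       val = force_operand (val, NULL_RTX);
*/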
6215 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6216 enum expand_modifier, rtx *);
6219 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6220 enum expand_modifier modifier, rtx *alt_rtl)
6223 rtx ret, last = NULL;
6225 /* Handle ERROR_MARK before anybody tries to access its type. */
6226 if (TREE_CODE (exp) == ERROR_MARK
6227 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6229 ret = CONST0_RTX (tmode);
6230 return ret ? ret : const0_rtx;
6233 if (flag_non_call_exceptions)
6235 rn = lookup_stmt_eh_region (exp);
6236 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6238 last = get_last_insn ();
6241 /* If this is an expression of some kind and it has an associated line
6242 number, then emit the line number before expanding the expression.
6244 We need to save and restore the file and line information so that
6245 errors discovered during expansion are emitted with the right
6246 information. It would be better if the diagnostic routines
6247 used the file/line information embedded in the tree nodes rather than globals. */
6249 if (cfun && EXPR_HAS_LOCATION (exp))
6251 location_t saved_location = input_location;
6252 input_location = EXPR_LOCATION (exp);
6253 emit_line_note (input_location);
6255 /* Record where the insns produced belong. */
6256 record_block_change (TREE_BLOCK (exp));
6258 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6260 input_location = saved_location;
6264 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6267 /* If using non-call exceptions, mark all insns that may trap.
6268 expand_call() will mark CALL_INSNs before we get to this code,
6269 but it doesn't handle libcalls, and these may trap. */
6273 for (insn = next_real_insn (last); insn;
6274 insn = next_real_insn (insn))
6276 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6277 /* If we want exceptions for non-call insns, any
6278 may_trap_p instruction may throw. */
6279 && GET_CODE (PATTERN (insn)) != CLOBBER
6280 && GET_CODE (PATTERN (insn)) != USE
6281 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6283 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6293 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6294 enum expand_modifier modifier, rtx *alt_rtl)
6297 tree type = TREE_TYPE (exp);
6299 enum machine_mode mode;
6300 enum tree_code code = TREE_CODE (exp);
6302 rtx subtarget, original_target;
6305 bool reduce_bit_field = false;
6306 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6307 ? reduce_to_bit_field_precision ((expr), \
6312 mode = TYPE_MODE (type);
6313 unsignedp = TYPE_UNSIGNED (type);
6314 if (lang_hooks.reduce_bit_field_operations
6315 && TREE_CODE (type) == INTEGER_TYPE
6316 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6318 /* An operation in what may be a bit-field type needs the
6319 result to be reduced to the precision of the bit-field type,
6320 which is narrower than that of the type's mode. */
6321 reduce_bit_field = true;
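/* Editor's note (illustrative, not from the original source): this
   triggers for types whose precision is narrower than their mode, e.g.
   a 3-bit unsigned type carried in QImode. Arithmetic is performed in
   the full QImode and the result masked back by REDUCE_BIT_FIELD:

       7 + 1 = 8 = 0b1000, reduced to 3 bits -> 0b000 = 0

   so the value stays within the 3-bit range the language expects. */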
6322 if (modifier == EXPAND_STACK_PARM)
6326 /* Use subtarget as the target for operand 0 of a binary operation. */
6327 subtarget = get_subtarget (target);
6328 original_target = target;
6329 ignore = (target == const0_rtx
6330 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6331 || code == CONVERT_EXPR || code == COND_EXPR
6332 || code == VIEW_CONVERT_EXPR)
6333 && TREE_CODE (type) == VOID_TYPE));
6335 /* If we are going to ignore this result, we need only do something
6336 if there is a side-effect somewhere in the expression. If there
6337 is, short-circuit the most common cases here. Note that we must
6338 not call expand_expr with anything but const0_rtx in case this
6339 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6343 if (! TREE_SIDE_EFFECTS (exp))
6346 /* Ensure we reference a volatile object even if the value is ignored, but
6347 don't do this if all we are doing is taking its address. */
6348 if (TREE_THIS_VOLATILE (exp)
6349 && TREE_CODE (exp) != FUNCTION_DECL
6350 && mode != VOIDmode && mode != BLKmode
6351 && modifier != EXPAND_CONST_ADDRESS)
6353 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6355 temp = copy_to_reg (temp);
6359 if (TREE_CODE_CLASS (code) == tcc_unary
6360 || code == COMPONENT_REF || code == INDIRECT_REF)
6361 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6364 else if (TREE_CODE_CLASS (code) == tcc_binary
6365 || TREE_CODE_CLASS (code) == tcc_comparison
6366 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6368 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6369 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6372 else if (code == BIT_FIELD_REF)
6374 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6375 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6376 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6383 /* If we will do cse, generate all results into pseudo registers
6384 since 1) that allows cse to find more things
6385 and 2) otherwise cse could produce an insn the machine
6386 cannot support. An exception is a CONSTRUCTOR into a multi-word
6387 MEM: that's much more likely to be most efficient into the MEM.
6388 Another is a CALL_EXPR which must return in memory. */
6390 if (! cse_not_expected && mode != BLKmode && target
6391 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6392 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6393 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6400 tree function = decl_function_context (exp);
6402 temp = label_rtx (exp);
6403 temp = gen_rtx_LABEL_REF (Pmode, temp);
6405 if (function != current_function_decl
6407 LABEL_REF_NONLOCAL_P (temp) = 1;
6409 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6414 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6419 /* If a static var's type was incomplete when the decl was written,
6420 but the type is complete now, lay out the decl now. */
6421 if (DECL_SIZE (exp) == 0
6422 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6423 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6424 layout_decl (exp, 0);
6426 /* ... fall through ... */
6430 gcc_assert (DECL_RTL (exp));
6432 /* Ensure the variable is marked as used even if it doesn't go through
6433 a parser. If it hasn't been used yet, write out an external definition. */
6435 if (! TREE_USED (exp))
6437 assemble_external (exp);
6438 TREE_USED (exp) = 1;
6441 /* Show we haven't gotten RTL for this yet. */
6444 /* Variables inherited from containing functions should have
6445 been lowered by this point. */
6446 context = decl_function_context (exp);
6447 gcc_assert (!context
6448 || context == current_function_decl
6449 || TREE_STATIC (exp)
6450 /* ??? C++ creates functions that are not TREE_STATIC. */
6451 || TREE_CODE (exp) == FUNCTION_DECL);
6453 /* This is the case of an array whose size is to be determined
6454 from its initializer, while the initializer is still being parsed.
6457 if (MEM_P (DECL_RTL (exp))
6458 && REG_P (XEXP (DECL_RTL (exp), 0)))
6459 temp = validize_mem (DECL_RTL (exp));
6461 /* If DECL_RTL is memory, we are in the normal case and either
6462 the address is not valid, or it is not a register and -fforce-addr
6463 is specified; get the address into a register. */
6465 else if (MEM_P (DECL_RTL (exp))
6466 && modifier != EXPAND_CONST_ADDRESS
6467 && modifier != EXPAND_SUM
6468 && modifier != EXPAND_INITIALIZER
6469 && (! memory_address_p (DECL_MODE (exp),
6470 XEXP (DECL_RTL (exp), 0))
6472 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6475 *alt_rtl = DECL_RTL (exp);
6476 temp = replace_equiv_address (DECL_RTL (exp),
6477 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6480 /* If we got something, return it. But first, set the alignment
6481 if the address is a register. */
6484 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6485 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6490 /* If the mode of DECL_RTL does not match that of the decl, it
6491 must be a promoted value. We return a SUBREG of the wanted mode,
6492 but mark it so that we know that it was already extended. */
6494 if (REG_P (DECL_RTL (exp))
6495 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6497 enum machine_mode pmode;
6499 /* Get the signedness used for this variable. Ensure we get the
6500 same mode we got when the variable was declared. */
6501 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6502 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6503 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6505 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6506 SUBREG_PROMOTED_VAR_P (temp) = 1;
6507 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6511 return DECL_RTL (exp);
6514 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6515 TREE_INT_CST_HIGH (exp), mode);
6517 /* ??? If overflow is set, fold will have done an incomplete job,
6518 which can result in (plus xx (const_int 0)), which can get
6519 simplified by validate_replace_rtx during virtual register
6520 instantiation, which can result in unrecognizable insns.
6521 Avoid this by forcing all overflows into registers. */
6522 if (TREE_CONSTANT_OVERFLOW (exp)
6523 && modifier != EXPAND_INITIALIZER)
6524 temp = force_reg (mode, temp);
6529 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6530 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6531 return const_vector_from_tree (exp);
6533 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6534 TREE_VECTOR_CST_ELTS (exp)),
6535 ignore ? const0_rtx : target, tmode, modifier);
6538 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6541 /* If optimized, generate immediate CONST_DOUBLE
6542 which will be turned into memory by reload if necessary.
6544 We used to force a register so that loop.c could see it. But
6545 this does not allow gen_* patterns to perform optimizations with
6546 the constants. It also produces two insns in cases like "x = 1.0;".
6547 On most machines, floating-point constants are not permitted in
6548 many insns, so we'd end up copying it to a register in any case.
6550 Now, we do the copying in expand_binop, if appropriate. */
6551 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6552 TYPE_MODE (TREE_TYPE (exp)));
6555 /* Handle evaluating a complex constant in a CONCAT target. */
6556 if (original_target && GET_CODE (original_target) == CONCAT)
6558 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6561 rtarg = XEXP (original_target, 0);
6562 itarg = XEXP (original_target, 1);
6564 /* Move the real and imaginary parts separately. */
6565 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6566 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6569 emit_move_insn (rtarg, op0);
6571 emit_move_insn (itarg, op1);
6573 return original_target;
6576 /* ... fall through ... */
6579 temp = output_constant_def (exp, 1);
6581 /* temp contains a constant address.
6582 On RISC machines where a constant address isn't valid,
6583 make some insns to get that address into a register. */
6584 if (modifier != EXPAND_CONST_ADDRESS
6585 && modifier != EXPAND_INITIALIZER
6586 && modifier != EXPAND_SUM
6587 && (! memory_address_p (mode, XEXP (temp, 0))
6588 || flag_force_addr))
6589 return replace_equiv_address (temp,
6590 copy_rtx (XEXP (temp, 0)));
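/* Editor's note: an illustrative sketch of the legitimization above.
   On a machine where a bare SYMBOL_REF is not a valid address, the MEM
   returned by output_constant_def, conceptually

       (mem (symbol_ref "*.LC0"))

   is rewritten by replace_equiv_address so the address is first loaded
   into a pseudo:

       (set (reg Rn) (symbol_ref "*.LC0"))
       (mem (reg Rn))

   The symbol name and register here are hypothetical. */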
6595 tree val = TREE_OPERAND (exp, 0);
6596 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6598 if (!SAVE_EXPR_RESOLVED_P (exp))
6600 /* We can indeed still hit this case, typically via builtin
6601 expanders calling save_expr immediately before expanding
6602 something. Assume this means that we only have to deal
6603 with non-BLKmode values. */
6604 gcc_assert (GET_MODE (ret) != BLKmode);
6606 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6607 DECL_ARTIFICIAL (val) = 1;
6608 DECL_IGNORED_P (val) = 1;
6609 TREE_OPERAND (exp, 0) = val;
6610 SAVE_EXPR_RESOLVED_P (exp) = 1;
6612 if (!CONSTANT_P (ret))
6613 ret = copy_to_reg (ret);
6614 SET_DECL_RTL (val, ret);
6621 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6622 expand_goto (TREE_OPERAND (exp, 0));
6624 expand_computed_goto (TREE_OPERAND (exp, 0));
6628 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6634 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6635 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6640 /* All elts simple constants => refer to a constant in memory. But
6641 if this is a non-BLKmode mode, let it store a field at a time
6642 since that should make a CONST_INT or CONST_DOUBLE when we
6643 fold. Likewise, if we have a target we can use, it is best to
6644 store directly into the target unless the type is large enough
6645 that memcpy will be used. If we are making an initializer and
6646 all operands are constant, put it in memory as well.
6648 FIXME: Avoid trying to fill vector constructors piece-meal.
6649 Output them with output_constant_def below unless we're sure
6650 they're zeros. This should go away when vector initializers
6651 are treated like VECTOR_CST instead of arrays.
6653 else if ((TREE_STATIC (exp)
6654 && ((mode == BLKmode
6655 && ! (target != 0 && safe_from_p (target, exp, 1)))
6656 || TREE_ADDRESSABLE (exp)
6657 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6658 && (! MOVE_BY_PIECES_P
6659 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6661 && ! mostly_zeros_p (exp))))
6662 || ((modifier == EXPAND_INITIALIZER
6663 || modifier == EXPAND_CONST_ADDRESS)
6664 && TREE_CONSTANT (exp)))
6666 rtx constructor = output_constant_def (exp, 1);
6668 if (modifier != EXPAND_CONST_ADDRESS
6669 && modifier != EXPAND_INITIALIZER
6670 && modifier != EXPAND_SUM)
6671 constructor = validize_mem (constructor);
6677 /* Handle calls that pass values in multiple non-contiguous
6678 locations. The Irix 6 ABI has examples of this. */
6679 if (target == 0 || ! safe_from_p (target, exp, 1)
6680 || GET_CODE (target) == PARALLEL
6681 || modifier == EXPAND_STACK_PARM)
6683 = assign_temp (build_qualified_type (type,
6685 | (TREE_READONLY (exp)
6686 * TYPE_QUAL_CONST))),
6687 0, TREE_ADDRESSABLE (exp), 1);
6689 store_constructor (exp, target, 0, int_expr_size (exp));
6693 case MISALIGNED_INDIRECT_REF:
6694 case ALIGN_INDIRECT_REF:
6697 tree exp1 = TREE_OPERAND (exp, 0);
6700 if (modifier != EXPAND_WRITE)
6704 t = fold_read_from_constant_string (exp);
6706 return expand_expr (t, target, tmode, modifier);
6709 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6710 op0 = memory_address (mode, op0);
6712 if (code == ALIGN_INDIRECT_REF)
6714 int align = TYPE_ALIGN_UNIT (type);
6715 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6716 op0 = memory_address (mode, op0);
6719 temp = gen_rtx_MEM (mode, op0);
6721 orig = REF_ORIGINAL (exp);
6724 set_mem_attributes (temp, orig, 0);
6726 /* Resolve the misalignment now, so that we don't have to remember
6727 to resolve it later. Of course, this only works for reads. */
6728 /* ??? When we get around to supporting writes, we'll have to handle
6729 this in store_expr directly. The vectorizer isn't generating
6730 those yet, however. */
6731 if (code == MISALIGNED_INDIRECT_REF)
6736 gcc_assert (modifier == EXPAND_NORMAL);
6738 /* The vectorizer should have already checked the mode. */
6739 icode = movmisalign_optab->handlers[mode].insn_code;
6740 gcc_assert (icode != CODE_FOR_nothing);
6742 /* We've already validated the memory, and we're creating a
6743 new pseudo destination. The predicates really can't fail. */
6744 reg = gen_reg_rtx (mode);
6746 /* Nor can the insn generator. */
6747 insn = GEN_FCN (icode) (reg, temp);
6759 tree array = TREE_OPERAND (exp, 0);
6760 tree index = TREE_OPERAND (exp, 1);
6762 /* Fold an expression like: "foo"[2].
6763 This is not done in fold so it won't happen inside &.
6764 Don't fold if this is for wide characters since it's too
6765 difficult to do correctly and this is a very rare case. */
6767 if (modifier != EXPAND_CONST_ADDRESS
6768 && modifier != EXPAND_INITIALIZER
6769 && modifier != EXPAND_MEMORY)
6771 tree t = fold_read_from_constant_string (exp);
6774 return expand_expr (t, target, tmode, modifier);
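/* Editor's note: a concrete instance (hypothetical source) of the fold
   above. Given

       char c = "foo"[2];

   fold_read_from_constant_string returns the INTEGER_CST for 'o', so
   the ARRAY_REF expands to a constant load rather than an indexed
   access of the string in memory. */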
6777 /* If this is a constant index into a constant array,
6778 just get the value from the array. Handle both the cases when
6779 we have an explicit constructor and when our operand is a variable
6780 that was declared const. */
6782 if (modifier != EXPAND_CONST_ADDRESS
6783 && modifier != EXPAND_INITIALIZER
6784 && modifier != EXPAND_MEMORY
6785 && TREE_CODE (array) == CONSTRUCTOR
6786 && ! TREE_SIDE_EFFECTS (array)
6787 && TREE_CODE (index) == INTEGER_CST)
6791 for (elem = CONSTRUCTOR_ELTS (array);
6792 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6793 elem = TREE_CHAIN (elem))
6796 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6797 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6801 else if (optimize >= 1
6802 && modifier != EXPAND_CONST_ADDRESS
6803 && modifier != EXPAND_INITIALIZER
6804 && modifier != EXPAND_MEMORY
6805 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6806 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6807 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6808 && targetm.binds_local_p (array))
6810 if (TREE_CODE (index) == INTEGER_CST)
6812 tree init = DECL_INITIAL (array);
6814 if (TREE_CODE (init) == CONSTRUCTOR)
6818 for (elem = CONSTRUCTOR_ELTS (init);
6820 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6821 elem = TREE_CHAIN (elem))
6824 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6825 return expand_expr (fold (TREE_VALUE (elem)), target,
6828 else if (TREE_CODE (init) == STRING_CST
6829 && 0 > compare_tree_int (index,
6830 TREE_STRING_LENGTH (init)))
6832 tree type = TREE_TYPE (TREE_TYPE (init));
6833 enum machine_mode mode = TYPE_MODE (type);
6835 if (GET_MODE_CLASS (mode) == MODE_INT
6836 && GET_MODE_SIZE (mode) == 1)
6837 return gen_int_mode (TREE_STRING_POINTER (init)
6838 [TREE_INT_CST_LOW (index)], mode);
6843 goto normal_inner_ref;
6846 /* If the operand is a CONSTRUCTOR, we can just extract the
6847 appropriate field if it is present. */
6848 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6852 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6853 elt = TREE_CHAIN (elt))
6854 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6855 /* We can normally use the value of the field in the
6856 CONSTRUCTOR. However, if this is a bitfield in
6857 an integral mode that we can fit in a HOST_WIDE_INT,
6858 we must mask only the number of bits in the bitfield,
6859 since this is done implicitly by the constructor. If
6860 the bitfield does not meet either of those conditions,
6861 we can't do this optimization. */
6862 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6863 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6865 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6866 <= HOST_BITS_PER_WIDE_INT))))
6868 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6869 && modifier == EXPAND_STACK_PARM)
6871 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6872 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6874 HOST_WIDE_INT bitsize
6875 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6876 enum machine_mode imode
6877 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6879 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6881 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6882 op0 = expand_and (imode, op0, op1, target);
6887 = build_int_cst (NULL_TREE,
6888 GET_MODE_BITSIZE (imode) - bitsize);
6890 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6892 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6900 goto normal_inner_ref;
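/* Editor's note: illustrative arithmetic for the masking and shifting
   above. Suppose the constructor supplies the value 0x2B for a 4-bit
   field held in SImode. For an unsigned field the value is masked:

       op0 & ((1 << 4) - 1)  =  0x2B & 0xF  =  0xB

   For a signed field the pair of shifts sign-extends bit 3 instead:

       (0xB << 28) >> 28  =  -5   (arithmetic right shift)
*/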
6903 case ARRAY_RANGE_REF:
6906 enum machine_mode mode1;
6907 HOST_WIDE_INT bitsize, bitpos;
6910 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6911 &mode1, &unsignedp, &volatilep);
6914 /* If we got back the original object, something is wrong. Perhaps
6915 we are evaluating an expression too early. In any event, don't
6916 infinitely recurse. */
6917 gcc_assert (tem != exp);
6919 /* If TEM's type is a union of variable size, pass TARGET to the inner
6920 computation, since it will need a temporary and TARGET is known
6921 to suffice. This occurs in unchecked conversion in Ada. */
6925 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6926 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6928 && modifier != EXPAND_STACK_PARM
6929 ? target : NULL_RTX),
6931 (modifier == EXPAND_INITIALIZER
6932 || modifier == EXPAND_CONST_ADDRESS
6933 || modifier == EXPAND_STACK_PARM)
6934 ? modifier : EXPAND_NORMAL);
6936 /* If this is a constant, put it into a register if it is a
6937 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6938 if (CONSTANT_P (op0))
6940 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6941 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6943 op0 = force_reg (mode, op0);
6945 op0 = validize_mem (force_const_mem (mode, op0));
6948 /* Otherwise, if this object is not in memory and we either have an
6949 offset or a BLKmode result, put it there. This case can't occur in
6950 C, but can in Ada if we have unchecked conversion of an expression
6951 from a scalar type to an array or record type or for an
6952 ARRAY_RANGE_REF whose type is BLKmode. */
6953 else if (!MEM_P (op0)
6955 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6957 tree nt = build_qualified_type (TREE_TYPE (tem),
6958 (TYPE_QUALS (TREE_TYPE (tem))
6959 | TYPE_QUAL_CONST));
6960 rtx memloc = assign_temp (nt, 1, 1, 1);
6962 emit_move_insn (memloc, op0);
6968 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6971 gcc_assert (MEM_P (op0));
6973 #ifdef POINTERS_EXTEND_UNSIGNED
6974 if (GET_MODE (offset_rtx) != Pmode)
6975 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6977 if (GET_MODE (offset_rtx) != ptr_mode)
6978 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6981 if (GET_MODE (op0) == BLKmode
6982 /* A constant address in OP0 can have VOIDmode; we must
6983 not try to call force_reg in that case. */
6984 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6986 && (bitpos % bitsize) == 0
6987 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6988 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6990 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6994 op0 = offset_address (op0, offset_rtx,
6995 highest_pow2_factor (offset));
6998 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6999 record its alignment as BIGGEST_ALIGNMENT. */
7000 if (MEM_P (op0) && bitpos == 0 && offset != 0
7001 && is_aligning_offset (offset, tem))
7002 set_mem_align (op0, BIGGEST_ALIGNMENT);
7004 /* Don't forget about volatility even if this is a bitfield. */
7005 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7007 if (op0 == orig_op0)
7008 op0 = copy_rtx (op0);
7010 MEM_VOLATILE_P (op0) = 1;
7013 /* The following code doesn't handle CONCAT.
7014 Assume only bitpos == 0 can be used for CONCAT, due to
7015 one-element arrays having the same mode as their element. */
7016 if (GET_CODE (op0) == CONCAT)
7018 gcc_assert (bitpos == 0
7019 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7023 /* In cases where an aligned union has an unaligned object
7024 as a field, we might be extracting a BLKmode value from
7025 an integer-mode (e.g., SImode) object. Handle this case
7026 by doing the extract into an object as wide as the field
7027 (which we know to be the width of a basic mode), then
7028 storing into memory, and changing the mode to BLKmode. */
7029 if (mode1 == VOIDmode
7030 || REG_P (op0) || GET_CODE (op0) == SUBREG
7031 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7032 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7033 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7034 && modifier != EXPAND_CONST_ADDRESS
7035 && modifier != EXPAND_INITIALIZER)
7036 /* If the field isn't aligned enough to fetch as a memref,
7037 fetch it as a bit field. */
7038 || (mode1 != BLKmode
7039 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7040 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7042 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7043 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7044 && ((modifier == EXPAND_CONST_ADDRESS
7045 || modifier == EXPAND_INITIALIZER)
7047 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7048 || (bitpos % BITS_PER_UNIT != 0)))
7049 /* If the type and the field are a constant size and the
7050 size of the type isn't the same size as the bitfield,
7051 we must use bitfield operations. */
7053 && TYPE_SIZE (TREE_TYPE (exp))
7054 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7055 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7058 enum machine_mode ext_mode = mode;
7060 if (ext_mode == BLKmode
7061 && ! (target != 0 && MEM_P (op0)
7063 && bitpos % BITS_PER_UNIT == 0))
7064 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7066 if (ext_mode == BLKmode)
7069 target = assign_temp (type, 0, 1, 1);
7074 /* In this case, BITPOS must start at a byte boundary and
7075 TARGET, if specified, must be a MEM. */
7076 gcc_assert (MEM_P (op0)
7077 && (!target || MEM_P (target))
7078 && !(bitpos % BITS_PER_UNIT));
7080 emit_block_move (target,
7081 adjust_address (op0, VOIDmode,
7082 bitpos / BITS_PER_UNIT),
7083 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7085 (modifier == EXPAND_STACK_PARM
7086 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7091 op0 = validize_mem (op0);
7093 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7094 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7096 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7097 (modifier == EXPAND_STACK_PARM
7098 ? NULL_RTX : target),
7099 ext_mode, ext_mode);
7101 /* If the result is a record type and BITSIZE is narrower than
7102 the mode of OP0, an integral mode, and this is a big endian
7103 machine, we must put the field into the high-order bits. */
7104 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7105 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7106 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7107 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7108 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7112 /* If the result type is BLKmode, store the data into a temporary
7113 of the appropriate type, but with the mode corresponding to the
7114 mode for the data we have (op0's mode). It's tempting to make
7115 this a constant type, since we know it's only being stored once,
7116 but that can cause problems if we are taking the address of this
7117 COMPONENT_REF because the MEM of any reference via that address
7118 will have flags corresponding to the type, which will not
7119 necessarily be constant. */
7120 if (mode == BLKmode)
7123 = assign_stack_temp_for_type
7124 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7126 emit_move_insn (new, op0);
7127 op0 = copy_rtx (new);
7128 PUT_MODE (op0, BLKmode);
7129 set_mem_attributes (op0, exp, 1);
7135 /* If the result is BLKmode, use that to access the object
7137 if (mode == BLKmode)
7140 /* Get a reference to just this component. */
7141 if (modifier == EXPAND_CONST_ADDRESS
7142 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7143 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7145 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7147 if (op0 == orig_op0)
7148 op0 = copy_rtx (op0);
7150 set_mem_attributes (op0, exp, 0);
7151 if (REG_P (XEXP (op0, 0)))
7152 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7154 MEM_VOLATILE_P (op0) |= volatilep;
7155 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7156 || modifier == EXPAND_CONST_ADDRESS
7157 || modifier == EXPAND_INITIALIZER)
7159 else if (target == 0)
7160 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7162 convert_move (target, op0, unsignedp);
7167 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7170 /* Check for a built-in function. */
7171 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7172 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7174 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7176 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7177 == BUILT_IN_FRONTEND)
7178 return lang_hooks.expand_expr (exp, original_target,
7182 return expand_builtin (exp, target, subtarget, tmode, ignore);
7185 return expand_call (exp, target, ignore);
7187 case NON_LVALUE_EXPR:
7190 if (TREE_OPERAND (exp, 0) == error_mark_node)
7193 if (TREE_CODE (type) == UNION_TYPE)
7195 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7197 /* If both input and output are BLKmode, this conversion isn't doing
7198 anything except possibly changing memory attributes. */
7199 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7201 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7204 result = copy_rtx (result);
7205 set_mem_attributes (result, exp, 0);
7211 if (TYPE_MODE (type) != BLKmode)
7212 target = gen_reg_rtx (TYPE_MODE (type));
7214 target = assign_temp (type, 0, 1, 1);
7218 /* Store data into beginning of memory target. */
7219 store_expr (TREE_OPERAND (exp, 0),
7220 adjust_address (target, TYPE_MODE (valtype), 0),
7221 modifier == EXPAND_STACK_PARM);
7225 gcc_assert (REG_P (target));
7227 /* Store this field into a union of the proper type. */
7228 store_field (target,
7229 MIN ((int_size_in_bytes (TREE_TYPE
7230 (TREE_OPERAND (exp, 0)))
7232 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7233 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7237 /* Return the entire union. */
7241 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7243 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7246 /* If the signedness of the conversion differs and OP0 is
7247 a promoted SUBREG, clear that indication since we now
7248 have to do the proper extension. */
7249 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7250 && GET_CODE (op0) == SUBREG)
7251 SUBREG_PROMOTED_VAR_P (op0) = 0;
7253 return REDUCE_BIT_FIELD (op0);
7256 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7257 if (GET_MODE (op0) == mode)
7260 /* If OP0 is a constant, just convert it into the proper mode. */
7261 else if (CONSTANT_P (op0))
7263 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7264 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7266 if (modifier == EXPAND_INITIALIZER)
7267 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7268 subreg_lowpart_offset (mode,
7271 op0 = convert_modes (mode, inner_mode, op0,
7272 TYPE_UNSIGNED (inner_type));
7275 else if (modifier == EXPAND_INITIALIZER)
7276 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7278 else if (target == 0)
7279 op0 = convert_to_mode (mode, op0,
7280 TYPE_UNSIGNED (TREE_TYPE
7281 (TREE_OPERAND (exp, 0))));
7284 convert_move (target, op0,
7285 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7289 return REDUCE_BIT_FIELD (op0);
7291 case VIEW_CONVERT_EXPR:
7292 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7294 /* If the input and output modes are both the same, we are done.
7295 Otherwise, if neither mode is BLKmode and both are integral and within
7296 a word, we can use gen_lowpart. If neither is true, make sure the
7297 operand is in memory and convert the MEM to the new mode. */
7298 if (TYPE_MODE (type) == GET_MODE (op0))
7300 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7301 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7302 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7303 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7304 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7305 op0 = gen_lowpart (TYPE_MODE (type), op0);
7306 else if (!MEM_P (op0))
7308 /* If the operand is not a MEM, force it into memory. Since we
7309 are going to be changing the mode of the MEM, don't call
7310 force_const_mem for constants because we don't allow pool
7311 constants to change mode. */
7312 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7314 gcc_assert (!TREE_ADDRESSABLE (exp));
7316 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7318 = assign_stack_temp_for_type
7319 (TYPE_MODE (inner_type),
7320 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7322 emit_move_insn (target, op0);
7326 /* At this point, OP0 is in the correct mode. If the output type is such
7327 that the operand is known to be aligned, indicate that it is.
7328 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7332 op0 = copy_rtx (op0);
7334 if (TYPE_ALIGN_OK (type))
7335 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7336 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7337 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7339 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7340 HOST_WIDE_INT temp_size
7341 = MAX (int_size_in_bytes (inner_type),
7342 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7343 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7344 temp_size, 0, type);
7345 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7347 gcc_assert (!TREE_ADDRESSABLE (exp));
7349 if (GET_MODE (op0) == BLKmode)
7350 emit_block_move (new_with_op0_mode, op0,
7351 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7352 (modifier == EXPAND_STACK_PARM
7353 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7355 emit_move_insn (new_with_op0_mode, op0);
7360 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7366 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7367 something else, make sure we add the register to the constant and
7368 then to the other thing. This case can occur during strength
7369 reduction and doing it this way will produce better code if the
7370 frame pointer or argument pointer is eliminated.
7372 fold-const.c will ensure that the constant is always in the inner
7373 PLUS_EXPR, so the only case we need to do anything about is if
7374 sp, ap, or fp is our second argument, in which case we must swap
7375 the innermost first argument and our second argument. */
7377 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7378 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7379 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7380 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7381 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7382 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7384 tree t = TREE_OPERAND (exp, 1);
7386 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7387 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7390 /* If the result is to be ptr_mode and we are adding an integer to
7391 something, we might be forming a constant. So try to use
7392 plus_constant. If it produces a sum and we can't accept it,
7393 use force_operand. This allows P = &ARR[const] to generate
7394 efficient code on machines where a SYMBOL_REF is not a valid address.
7397 If this is an EXPAND_SUM call, always return the sum. */
7398 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7399 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7401 if (modifier == EXPAND_STACK_PARM)
7403 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7404 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7405 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7409 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7411 /* Use immed_double_const to ensure that the constant is
7412 truncated according to the mode of OP1, then sign extended
7413 to a HOST_WIDE_INT. Using the constant directly can result
7414 in non-canonical RTL in a 64x32 cross compile. */
7416 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7418 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7419 op1 = plus_constant (op1, INTVAL (constant_part));
7420 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7421 op1 = force_operand (op1, target);
7422 return REDUCE_BIT_FIELD (op1);
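/* Editor's note: a worked example (illustrative) of the truncation
   issue described above. In a 64x32 cross compile, the INTEGER_CST
   0xfffffff0 arrives as the HOST_WIDE_INT 0x00000000fffffff0; used
   directly it would not be a canonical SImode CONST_INT.

       immed_double_const (0xfffffff0, 0, SImode)

   truncates to the mode and sign-extends, yielding (const_int -16),
   which plus_constant can then combine safely. */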
7425 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7426 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7427 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7431 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7432 (modifier == EXPAND_INITIALIZER
7433 ? EXPAND_INITIALIZER : EXPAND_SUM));
7434 if (! CONSTANT_P (op0))
7436 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7437 VOIDmode, modifier);
7438 /* Return a PLUS if modifier says it's OK. */
7439 if (modifier == EXPAND_SUM
7440 || modifier == EXPAND_INITIALIZER)
7441 return simplify_gen_binary (PLUS, mode, op0, op1);
7444 /* Use immed_double_const to ensure that the constant is
7445 truncated according to the mode of OP1, then sign extended
7446 to a HOST_WIDE_INT. Using the constant directly can result
7447 in non-canonical RTL in a 64x32 cross compile. */
7449 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7451 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7452 op0 = plus_constant (op0, INTVAL (constant_part));
7453 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7454 op0 = force_operand (op0, target);
7455 return REDUCE_BIT_FIELD (op0);
7459 /* No sense saving up arithmetic to be done
7460 if it's all in the wrong mode to form part of an address.
7461 And force_operand won't know whether to sign-extend or zero-extend. */
7463 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7464 || mode != ptr_mode)
7466 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7467 subtarget, &op0, &op1, 0);
7468 if (op0 == const0_rtx)
7470 if (op1 == const0_rtx)
7475 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7476 subtarget, &op0, &op1, modifier);
7477 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7480 /* For initializers, we are allowed to return a MINUS of two
7481 symbolic constants. Here we handle all cases when both operands are constant. */
7483 /* Handle difference of two symbolic constants,
7484 for the sake of an initializer. */
7485 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7486 && really_constant_p (TREE_OPERAND (exp, 0))
7487 && really_constant_p (TREE_OPERAND (exp, 1)))
7489 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7490 NULL_RTX, &op0, &op1, modifier);
7492 /* If the last operand is a CONST_INT, use plus_constant of
7493 the negated constant. Else make the MINUS. */
7494 if (GET_CODE (op1) == CONST_INT)
7495 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7497 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7500 /* No sense saving up arithmetic to be done
7501 if it's all in the wrong mode to form part of an address.
7502 And force_operand won't know whether to sign-extend or zero-extend. */
7504 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7505 || mode != ptr_mode)
7508 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7509 subtarget, &op0, &op1, modifier);
7511 /* Convert A - const to A + (-const). */
7512 if (GET_CODE (op1) == CONST_INT)
7514 op1 = negate_rtx (mode, op1);
7515 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7521 /* If first operand is constant, swap them.
7522 Thus the following special case checks need only
7523 check the second operand. */
7524 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7526 tree t1 = TREE_OPERAND (exp, 0);
7527 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7528 TREE_OPERAND (exp, 1) = t1;
7531 /* Attempt to return something suitable for generating an
7532 indexed address, for machines that support that. */
7534 if (modifier == EXPAND_SUM && mode == ptr_mode
7535 && host_integerp (TREE_OPERAND (exp, 1), 0))
7537 tree exp1 = TREE_OPERAND (exp, 1);
7539 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7543 op0 = force_operand (op0, NULL_RTX);
7545 op0 = copy_to_mode_reg (mode, op0);
7547 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7548 gen_int_mode (tree_low_cst (exp1, 0),
7549 TYPE_MODE (TREE_TYPE (exp1)))));
7552 if (modifier == EXPAND_STACK_PARM)
7555 /* Check for multiplying things that have been extended
7556 from a narrower type. If this machine supports multiplying
7557 in that narrower type with a result in the desired type,
7558 do it that way, and avoid the explicit type-conversion. */
7559 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7560 && TREE_CODE (type) == INTEGER_TYPE
7561 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7562 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7563 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7564 && int_fits_type_p (TREE_OPERAND (exp, 1),
7565 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7566 /* Don't use a widening multiply if a shift will do. */
7567 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7568 > HOST_BITS_PER_WIDE_INT)
7569 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7571 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7572 && (TYPE_PRECISION (TREE_TYPE
7573 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7574 == TYPE_PRECISION (TREE_TYPE
7576 (TREE_OPERAND (exp, 0), 0))))
7577 /* If both operands are extended, they must either both
7578 be zero-extended or both be sign-extended. */
7579 && (TYPE_UNSIGNED (TREE_TYPE
7580 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7581 == TYPE_UNSIGNED (TREE_TYPE
7583 (TREE_OPERAND (exp, 0), 0)))))))
7585 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7586 enum machine_mode innermode = TYPE_MODE (op0type);
7587 bool zextend_p = TYPE_UNSIGNED (op0type);
7588 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7589 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7591 if (mode == GET_MODE_WIDER_MODE (innermode))
7593 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7595 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7596 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7597 TREE_OPERAND (exp, 1),
7598 NULL_RTX, &op0, &op1, 0);
7600 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7601 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7602 NULL_RTX, &op0, &op1, 0);
7605 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7606 && innermode == word_mode)
7609 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7610 NULL_RTX, VOIDmode, 0);
7611 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7612 op1 = convert_modes (innermode, mode,
7613 expand_expr (TREE_OPERAND (exp, 1),
7614 NULL_RTX, VOIDmode, 0),
7617 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7618 NULL_RTX, VOIDmode, 0);
7619 temp = expand_binop (mode, other_optab, op0, op1, target,
7620 unsignedp, OPTAB_LIB_WIDEN);
7621 hipart = gen_highpart (innermode, temp);
7622 htem = expand_mult_highpart_adjust (innermode, hipart,
7626 emit_move_insn (hipart, htem);
7627 return REDUCE_BIT_FIELD (temp);
7631 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7632 subtarget, &op0, &op1, 0);
7633 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
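/* Editor's note: an illustrative example of the widening-multiply path
   above. For

       short a, b;
       int p = (int) a * (int) b;

   both operands are conversions from HImode, so instead of two extends
   plus an SImode multiply, a single widening multiply (a mulhisi3-style
   pattern, via smul_widen_optab) computes the SImode product directly
   from the HImode inputs. */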
7635 case TRUNC_DIV_EXPR:
7636 case FLOOR_DIV_EXPR:
7638 case ROUND_DIV_EXPR:
7639 case EXACT_DIV_EXPR:
7640 if (modifier == EXPAND_STACK_PARM)
7642 /* Possible optimization: compute the dividend with EXPAND_SUM;
7643 then, if the divisor is constant, we can optimize the case
7644 where some terms of the dividend have coefficients divisible by it. */
7645 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7646 subtarget, &op0, &op1, 0);
7647 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7650 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal, saving an
7651 expensive divide. If not, combine will rebuild the original computation. */
7653 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7654 && TREE_CODE (type) == REAL_TYPE
7655 && !real_onep (TREE_OPERAND (exp, 0)))
7656 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7657 build2 (RDIV_EXPR, type,
7658 build_real (type, dconst1),
7659 TREE_OPERAND (exp, 1))),
7660 target, tmode, modifier);
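/* Editor's note: the rewrite above, spelled out (illustrative). Under
   -funsafe-math-optimizations,

       x / y   becomes   x * (1.0 / y)

   so if y is reused or loop-invariant, CSE can share one reciprocal and
   each use then costs only a multiply. The !real_onep guard on the
   numerator keeps 1.0/y from being rewritten into 1.0 * (1.0/y), which
   would simplify nothing. */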
7664 case TRUNC_MOD_EXPR:
7665 case FLOOR_MOD_EXPR:
7667 case ROUND_MOD_EXPR:
7668 if (modifier == EXPAND_STACK_PARM)
7670 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7671 subtarget, &op0, &op1, 0);
7672 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7674 case FIX_ROUND_EXPR:
7675 case FIX_FLOOR_EXPR:
7677 gcc_unreachable (); /* Not used for C. */
7679 case FIX_TRUNC_EXPR:
7680 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7681 if (target == 0 || modifier == EXPAND_STACK_PARM)
7682 target = gen_reg_rtx (mode);
7683 expand_fix (target, op0, unsignedp);
7687 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7688 if (target == 0 || modifier == EXPAND_STACK_PARM)
7689 target = gen_reg_rtx (mode);
7690 /* expand_float can't figure out what to do if FROM has VOIDmode.
7691 So give it the correct mode. With -O, cse will optimize this. */
7692 if (GET_MODE (op0) == VOIDmode)
7693 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7695 expand_float (target, op0,
7696 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7700 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7701 if (modifier == EXPAND_STACK_PARM)
7703 temp = expand_unop (mode,
7704 optab_for_tree_code (NEGATE_EXPR, type),
7707 return REDUCE_BIT_FIELD (temp);
7710 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7711 if (modifier == EXPAND_STACK_PARM)
7714 /* ABS_EXPR is not valid for complex arguments. */
7715 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7716 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7718 /* Unsigned abs is simply the operand. Testing here means we don't
7719 risk generating incorrect code below. */
7720 if (TYPE_UNSIGNED (type))
7723 return expand_abs (mode, op0, target, unsignedp,
7724 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7728 target = original_target;
7730 || modifier == EXPAND_STACK_PARM
7731 || (MEM_P (target) && MEM_VOLATILE_P (target))
7732 || GET_MODE (target) != mode
7734 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7735 target = gen_reg_rtx (mode);
7736 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7737 target, &op0, &op1, 0);
7739 /* First try to do it with a special MIN or MAX instruction.
7740 If that does not win, use a conditional jump to select the proper value. */
7742 this_optab = optab_for_tree_code (code, type);
7743 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7748 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7751 if (! REG_P (target))
7752 target = gen_reg_rtx (mode);
7754 /* If op1 was placed in target, swap op0 and op1. */
7755 if (target != op0 && target == op1)
7762 /* We generate better code and avoid problems with op1 mentioning
7763 target by forcing op1 into a pseudo if it isn't a constant. */
7764 if (! CONSTANT_P (op1))
7765 op1 = force_reg (mode, op1);
7768 emit_move_insn (target, op0);
7770 op0 = gen_label_rtx ();
7772 /* If this mode is an integer too wide to compare properly,
7773 compare word by word. Rely on cse to optimize constant cases. */
7774 if (GET_MODE_CLASS (mode) == MODE_INT
7775 && ! can_compare_p (GE, mode, ccp_jump))
7777 if (code == MAX_EXPR)
7778 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7781 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7786 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7787 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7789 emit_move_insn (target, op1);
7794 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7795 if (modifier == EXPAND_STACK_PARM)
7797 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7801 /* ??? Can optimize bitwise operations with one arg constant.
7802 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7803 and (a bitwise1 b) bitwise2 b (etc)
7804 but that is probably not worthwhile. */
7806 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7807 boolean values when we want in all cases to compute both of them. In
7808 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7809 as actual zero-or-1 values and then bitwise anding. In cases where
7810 there cannot be any side effects, better code would be made by
7811 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7812 how to recognize those cases. */
7814 case TRUTH_AND_EXPR:
7815 code = BIT_AND_EXPR;
7820 code = BIT_IOR_EXPR;
7824 case TRUTH_XOR_EXPR:
7825 code = BIT_XOR_EXPR;
7833 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7835 if (modifier == EXPAND_STACK_PARM)
7837 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7838 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7841 /* Could determine the answer when only additive constants differ. Also,
7842 the addition of one can be handled by changing the condition. */
7849 case UNORDERED_EXPR:
7857 temp = do_store_flag (exp,
7858 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7859 tmode != VOIDmode ? tmode : mode, 0);
7863 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7864 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7866 && REG_P (original_target)
7867 && (GET_MODE (original_target)
7868 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7870 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7873 /* If temp is constant, we can just compute the result. */
7874 if (GET_CODE (temp) == CONST_INT)
7876 if (INTVAL (temp) != 0)
7877 emit_move_insn (target, const1_rtx);
7879 emit_move_insn (target, const0_rtx);
7884 if (temp != original_target)
7886 enum machine_mode mode1 = GET_MODE (temp);
7887 if (mode1 == VOIDmode)
7888 mode1 = tmode != VOIDmode ? tmode : mode;
7890 temp = copy_to_mode_reg (mode1, temp);
7893 op1 = gen_label_rtx ();
7894 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7895 GET_MODE (temp), unsignedp, op1);
7896 emit_move_insn (temp, const1_rtx);
7901 /* If no set-flag instruction, must generate a conditional store
7902 into a temporary variable. Drop through and handle this like && and ||. */
7907 || modifier == EXPAND_STACK_PARM
7908 || ! safe_from_p (target, exp, 1)
7909 /* Make sure we don't have a hard reg (such as function's return
7910 value) live across basic blocks, if not optimizing. */
7911 || (!optimize && REG_P (target)
7912 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7913 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7916 emit_move_insn (target, const0_rtx);
7918 op1 = gen_label_rtx ();
7919 jumpifnot (exp, op1);
7922 emit_move_insn (target, const1_rtx);
7925 return ignore ? const0_rtx : target;
7927 case TRUTH_NOT_EXPR:
7928 if (modifier == EXPAND_STACK_PARM)
7930 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7931 /* The parser is careful to generate TRUTH_NOT_EXPR
7932 only with operands that are always zero or one. */
7933 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7934 target, 1, OPTAB_LIB_WIDEN);
7938 case STATEMENT_LIST:
7940 tree_stmt_iterator iter;
7942 gcc_assert (ignore);
7944 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7945 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7950 /* A COND_EXPR with its type being VOID_TYPE represents a
7951 conditional jump and is handled in
7952 expand_gimple_cond_expr. */
7953 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
7955 /* Note that COND_EXPRs whose type is a structure or union
7956 are required to be constructed to contain assignments of
7957 a temporary variable, so that we can evaluate them here
7958 for side effect only. If type is void, we must do likewise. */
7960 gcc_assert (!TREE_ADDRESSABLE (type)
7962 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7963 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7965 /* If we are not to produce a result, we have no target. Otherwise,
7966 if a target was specified use it; it will not be used as an
7967 intermediate target unless it is safe. If no target, use a temporary. */
7970 if (modifier != EXPAND_STACK_PARM
7972 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7973 && GET_MODE (original_target) == mode
7974 #ifdef HAVE_conditional_move
7975 && (! can_conditionally_move_p (mode)
7976 || REG_P (original_target))
7978 && !MEM_P (original_target))
7979 temp = original_target;
7981 temp = assign_temp (type, 0, 0, 1);
7983 do_pending_stack_adjust ();
7985 op0 = gen_label_rtx ();
7986 op1 = gen_label_rtx ();
7987 jumpifnot (TREE_OPERAND (exp, 0), op0);
7988 store_expr (TREE_OPERAND (exp, 1), temp,
7989 modifier == EXPAND_STACK_PARM);
7991 emit_jump_insn (gen_jump (op1));
7994 store_expr (TREE_OPERAND (exp, 2), temp,
7995 modifier == EXPAND_STACK_PARM);
8002 target = expand_vec_cond_expr (exp, target);
8007 tree lhs = TREE_OPERAND (exp, 0);
8008 tree rhs = TREE_OPERAND (exp, 1);
8010 gcc_assert (ignore);
8012 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8013 of size 1. In this case, (unless we need the result of the
8014 assignment) we can do this more efficiently with a
8015 test followed by an assignment, if necessary.
8017 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8018 things change so we do, this code should be enhanced to handle it. */
8020 if (TREE_CODE (lhs) == COMPONENT_REF
8021 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8022 || TREE_CODE (rhs) == BIT_AND_EXPR)
8023 && TREE_OPERAND (rhs, 0) == lhs
8024 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8025 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8026 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8028 rtx label = gen_label_rtx ();
8030 do_jump (TREE_OPERAND (rhs, 1),
8031 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8032 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8033 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8034 (TREE_CODE (rhs) == BIT_IOR_EXPR
8036 : integer_zero_node)));
8037 do_pending_stack_adjust ();
8042 expand_assignment (lhs, rhs);
8048 if (!TREE_OPERAND (exp, 0))
8049 expand_null_return ();
8051 expand_return (TREE_OPERAND (exp, 0));
8055 return expand_expr_addr_expr (exp, target, tmode, modifier);
8058 /* Get the rtx code of the operands. */
8059 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8060 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8063 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8065 /* Move the real (op0) and imaginary (op1) parts to their location. */
8066 write_complex_part (target, op0, false);
8067 write_complex_part (target, op1, true);
8072 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8073 return read_complex_part (op0, false);
8076 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8077 return read_complex_part (op0, true);
8080 expand_resx_expr (exp);
8083 case TRY_CATCH_EXPR:
8085 case EH_FILTER_EXPR:
8086 case TRY_FINALLY_EXPR:
8087 /* Lowered by tree-eh.c. */
8090 case WITH_CLEANUP_EXPR:
8091 case CLEANUP_POINT_EXPR:
8093 case CASE_LABEL_EXPR:
8099 case PREINCREMENT_EXPR:
8100 case PREDECREMENT_EXPR:
8101 case POSTINCREMENT_EXPR:
8102 case POSTDECREMENT_EXPR:
8105 case TRUTH_ANDIF_EXPR:
8106 case TRUTH_ORIF_EXPR:
8107 /* Lowered by gimplify.c. */
8111 return get_exception_pointer (cfun);
8114 return get_exception_filter (cfun);
8117 /* Function descriptors are not valid except as
8118 initialization constants, and should not be expanded. */
8126 expand_label (TREE_OPERAND (exp, 0));
8130 expand_asm_expr (exp);
8133 case WITH_SIZE_EXPR:
8134 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8135 have pulled out the size to use in whatever context it needed. */
8136 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8139 case REALIGN_LOAD_EXPR:
8141 tree oprnd0 = TREE_OPERAND (exp, 0);
8142 tree oprnd1 = TREE_OPERAND (exp, 1);
8143 tree oprnd2 = TREE_OPERAND (exp, 2);
8146 this_optab = optab_for_tree_code (code, type);
8147 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8148 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8149 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8158 return lang_hooks.expand_expr (exp, original_target, tmode,
8162 /* Here to do an ordinary binary operator. */
8164 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8165 subtarget, &op0, &op1, 0);
8167 this_optab = optab_for_tree_code (code, type);
8169 if (modifier == EXPAND_STACK_PARM)
8171 temp = expand_binop (mode, this_optab, op0, op1, target,
8172 unsignedp, OPTAB_LIB_WIDEN);
8174 return REDUCE_BIT_FIELD (temp);
8176 #undef REDUCE_BIT_FIELD
8178 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8179 signedness of TYPE), possibly returning the result in TARGET. */
8181 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8183 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8184 if (target && GET_MODE (target) != GET_MODE (exp))
8186 if (TYPE_UNSIGNED (type))
8189 if (prec < HOST_BITS_PER_WIDE_INT)
mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
			   GET_MODE (exp));
else
  mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
			     ((unsigned HOST_WIDE_INT) 1
			      << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
			     GET_MODE (exp));
8197 return expand_and (GET_MODE (exp), exp, mask, target);
8201 tree count = build_int_cst (NULL_TREE,
8202 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8203 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8204 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
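/* Illustrative sketch of the two reductions above (hypothetical
   helpers; not compiled here).  Unsigned types mask off the high bits;
   signed types shift the field up to the sign bit and arithmetic-shift
   it back down:  */
#if 0
#include <limits.h>

/* Equivalent of the expand_and path: keep the low PREC bits.
   Assumes 0 < prec < the width of unsigned int.  */
static unsigned int
reduce_unsigned (unsigned int x, int prec)
{
  return x & ((1u << prec) - 1);
}

/* Equivalent of the two expand_shift calls: sign-extend from bit
   PREC-1.  (Left-shifting a negative value is undefined in strict C;
   this only mirrors the RTL intent, where the shifts are defined.)  */
static int
reduce_signed (int x, int prec)
{
  int count = (int) sizeof (int) * CHAR_BIT - prec;
  return (x << count) >> count;
}
#endif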
8208 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8209 when applied to the address of EXP produces an address known to be
8210 aligned more than BIGGEST_ALIGNMENT. */
8213 is_aligning_offset (tree offset, tree exp)
8215 /* Strip off any conversions. */
8216 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8217 || TREE_CODE (offset) == NOP_EXPR
8218 || TREE_CODE (offset) == CONVERT_EXPR)
8219 offset = TREE_OPERAND (offset, 0);
/* We must now have a BIT_AND_EXPR with a constant that is one less than
   a power of 2 and which is larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */
8223 if (TREE_CODE (offset) != BIT_AND_EXPR
8224 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8225 || compare_tree_int (TREE_OPERAND (offset, 1),
8226 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
|| exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
  return 0;
8230 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8231 It must be NEGATE_EXPR. Then strip any more conversions. */
8232 offset = TREE_OPERAND (offset, 0);
8233 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8234 || TREE_CODE (offset) == NOP_EXPR
8235 || TREE_CODE (offset) == CONVERT_EXPR)
8236 offset = TREE_OPERAND (offset, 0);
if (TREE_CODE (offset) != NEGATE_EXPR)
  return 0;
8241 offset = TREE_OPERAND (offset, 0);
8242 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8243 || TREE_CODE (offset) == NOP_EXPR
8244 || TREE_CODE (offset) == CONVERT_EXPR)
8245 offset = TREE_OPERAND (offset, 0);
8247 /* This must now be the address of EXP. */
8248 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
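/* Illustrative sketch of the offset shape this recognizes (a
   hypothetical align-upward idiom; ALIGN is assumed to be a power of 2
   whose byte value exceeds BIGGEST_ALIGNMENT / BITS_PER_UNIT).  */
#if 0
#include <stdint.h>

#define ALIGN 64

static char *
align_up (char *p)
{
  /* The offset is (-ADDR) & (ALIGN - 1): a BIT_AND_EXPR whose first
     operand is a NEGATE_EXPR of the address of the object and whose
     second operand is one less than a power of 2.  */
  return p + ((0 - (uintptr_t) p) & (ALIGN - 1));
}
#endif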
/* If ARG corresponds to a string constant, return the tree node for
   that string constant; otherwise return zero.  If we return nonzero,
   set *PTR_OFFSET to the offset in bytes within the string that ARG
   is accessing.  The type of the offset will be `sizetype'.  */
8257 string_constant (tree arg, tree *ptr_offset)
8262 if (TREE_CODE (arg) == ADDR_EXPR)
8264 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8266 *ptr_offset = size_zero_node;
8267 return TREE_OPERAND (arg, 0);
8269 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8271 array = TREE_OPERAND (arg, 0);
8272 offset = size_zero_node;
8274 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8276 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8277 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8278 if (TREE_CODE (array) != STRING_CST
&& TREE_CODE (array) != VAR_DECL)
  return 0;
8285 else if (TREE_CODE (arg) == PLUS_EXPR)
8287 tree arg0 = TREE_OPERAND (arg, 0);
8288 tree arg1 = TREE_OPERAND (arg, 1);
8293 if (TREE_CODE (arg0) == ADDR_EXPR
8294 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8295 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8297 array = TREE_OPERAND (arg0, 0);
8300 else if (TREE_CODE (arg1) == ADDR_EXPR
8301 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8302 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8304 array = TREE_OPERAND (arg1, 0);
8313 if (TREE_CODE (array) == STRING_CST)
8315 *ptr_offset = convert (sizetype, offset);
8318 else if (TREE_CODE (array) == VAR_DECL)
8322 /* Variables initialized to string literals can be handled too. */
8323 if (DECL_INITIAL (array) == NULL_TREE
8324 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
/* Only handle arrays that are read-only, non-volatile, and bind
   locally.  */
8328 if (! TREE_READONLY (array)
8329 || TREE_SIDE_EFFECTS (array)
8330 || ! targetm.binds_local_p (array))
8333 /* Avoid const char foo[4] = "abcde"; */
8334 if (DECL_SIZE_UNIT (array) == NULL_TREE
8335 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8336 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8337 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
/* If the variable is bigger than the string literal, OFFSET must be
   constant and within the bounds of the string literal.  */
8342 offset = convert (sizetype, offset);
8343 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8344 && (! host_integerp (offset, 1)
8345 || compare_tree_int (offset, length) >= 0))
8348 *ptr_offset = offset;
8349 return DECL_INITIAL (array);
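/* Illustrative examples of ARGs this function recognizes (a sketch;
   the declaration below is hypothetical).  */
#if 0
static const char msg[] = "hello";  /* read-only and locally bound */

/* ADDR_EXPR of a STRING_CST:  &"hello"[0]        -> "hello", offset 0
   ARRAY_REF under ADDR_EXPR:  &msg[3]            -> DECL_INITIAL (msg), offset 3
   PLUS_EXPR form:             (char *) &msg + 4  -> DECL_INITIAL (msg), offset 4  */
#endif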
8355 /* Generate code to calculate EXP using a store-flag instruction
8356 and return an rtx for the result. EXP is either a comparison
8357 or a TRUTH_NOT_EXPR whose operand is a comparison.
8359 If TARGET is nonzero, store the result there if convenient.
If ONLY_CHEAP is nonzero, only do this if it is likely to be very
cheap.
8364 Return zero if there is no suitable set-flag instruction
8365 available on this machine.
8367 Once expand_expr has been called on the arguments of the comparison,
8368 we are committed to doing the store flag, since it is not safe to
8369 re-evaluate the expression. We emit the store-flag insn by calling
8370 emit_store_flag, but only expand the arguments if we have a reason
8371 to believe that emit_store_flag will be successful. If we think that
8372 it will, but it isn't, we have to simulate the store-flag with a
8373 set/jump/set sequence. */
8376 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8379 tree arg0, arg1, type;
8381 enum machine_mode operand_mode;
8385 enum insn_code icode;
8386 rtx subtarget = target;
8389 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8390 result at the end. We can't simply invert the test since it would
8391 have already been inverted if it were valid. This case occurs for
8392 some floating-point comparisons. */
8394 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8395 invert = 1, exp = TREE_OPERAND (exp, 0);
8397 arg0 = TREE_OPERAND (exp, 0);
8398 arg1 = TREE_OPERAND (exp, 1);
8400 /* Don't crash if the comparison was erroneous. */
8401 if (arg0 == error_mark_node || arg1 == error_mark_node)
8404 type = TREE_TYPE (arg0);
8405 operand_mode = TYPE_MODE (type);
8406 unsignedp = TYPE_UNSIGNED (type);
8408 /* We won't bother with BLKmode store-flag operations because it would mean
8409 passing a lot of information to emit_store_flag. */
8410 if (operand_mode == BLKmode)
8413 /* We won't bother with store-flag operations involving function pointers
8414 when function pointers must be canonicalized before comparisons. */
8415 #ifdef HAVE_canonicalize_funcptr_for_compare
8416 if (HAVE_canonicalize_funcptr_for_compare
8417 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
&& (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
    == FUNCTION_TYPE)
8420 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8421 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8422 == FUNCTION_TYPE))))
8429 /* Get the rtx comparison code to use. We know that EXP is a comparison
8430 operation of some type. Some comparisons against 1 and -1 can be
8431 converted to comparisons with zero. Do so here so that the tests
8432 below will be aware that we have a comparison with zero. These
8433 tests will not catch constants in the first operand, but constants
8434 are rarely passed as the first operand. */
8436 switch (TREE_CODE (exp))
8445 if (integer_onep (arg1))
8446 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8448 code = unsignedp ? LTU : LT;
8451 if (! unsignedp && integer_all_onesp (arg1))
8452 arg1 = integer_zero_node, code = LT;
8454 code = unsignedp ? LEU : LE;
8457 if (! unsignedp && integer_all_onesp (arg1))
8458 arg1 = integer_zero_node, code = GE;
8460 code = unsignedp ? GTU : GT;
8463 if (integer_onep (arg1))
8464 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8466 code = unsignedp ? GEU : GE;
8469 case UNORDERED_EXPR:
8498 /* Put a constant second. */
8499 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8501 tem = arg0; arg0 = arg1; arg1 = tem;
8502 code = swap_condition (code);
8505 /* If this is an equality or inequality test of a single bit, we can
8506 do this by shifting the bit being tested to the low-order bit and
8507 masking the result with the constant 1. If the condition was EQ,
8508 we xor it with 1. This does not require an scc insn and is faster
8509 than an scc insn even if we have it.
8511 The code to make this transformation was moved into fold_single_bit_test,
8512 so we just call into the folder and expand its result. */
8514 if ((code == NE || code == EQ)
8515 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8516 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8518 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
					  arg0, arg1, type),
		    target, VOIDmode, EXPAND_NORMAL);
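/* Illustrative sketch of the fold_single_bit_test transformation
   (hypothetical functions testing bit 3; not compiled here).  */
#if 0
static int
bit_ne (unsigned int x)
{
  /* (x & 8) != 0  becomes  (x >> 3) & 1  */
  return (x >> 3) & 1;
}

static int
bit_eq (unsigned int x)
{
  /* (x & 8) == 0  becomes  ((x >> 3) & 1) ^ 1  */
  return ((x >> 3) & 1) ^ 1;
}
#endif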
8524 /* Now see if we are likely to be able to do this. Return if not. */
if (! can_compare_p (code, operand_mode, ccp_store_flag))
  return 0;
8528 icode = setcc_gen_code[(int) code];
8529 if (icode == CODE_FOR_nothing
8530 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8532 /* We can only do this if it is one of the special cases that
8533 can be handled without an scc insn. */
8534 if ((code == LT && integer_zerop (arg1))
8535 || (! only_cheap && code == GE && integer_zerop (arg1)))
8537 else if (BRANCH_COST >= 0
8538 && ! only_cheap && (code == NE || code == EQ)
8539 && TREE_CODE (type) != REAL_TYPE
8540 && ((abs_optab->handlers[(int) operand_mode].insn_code
8541 != CODE_FOR_nothing)
8542 || (ffs_optab->handlers[(int) operand_mode].insn_code
8543 != CODE_FOR_nothing)))
8549 if (! get_subtarget (target)
|| GET_MODE (subtarget) != operand_mode)
  subtarget = 0;
8553 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
if (target == 0)
  target = gen_reg_rtx (mode);
8558 result = emit_store_flag (target, code, op0, op1,
8559 operand_mode, unsignedp, 1);
8564 result = expand_binop (mode, xor_optab, result, const1_rtx,
8565 result, 0, OPTAB_LIB_WIDEN);
8569 /* If this failed, we have to do this with set/compare/jump/set code. */
if (!REG_P (target)
    || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8572 target = gen_reg_rtx (GET_MODE (target));
8574 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8575 result = compare_from_rtx (op0, op1, code, unsignedp,
8576 operand_mode, NULL_RTX);
8577 if (GET_CODE (result) == CONST_INT)
8578 return (((result == const0_rtx && ! invert)
8579 || (result != const0_rtx && invert))
8580 ? const0_rtx : const1_rtx);
8582 /* The code of RESULT may not match CODE if compare_from_rtx
8583 decided to swap its operands and reverse the original code.
8585 We know that compare_from_rtx returns either a CONST_INT or
8586 a new comparison code, so it is safe to just extract the
8587 code from RESULT. */
8588 code = GET_CODE (result);
8590 label = gen_label_rtx ();
8591 gcc_assert (bcc_gen_fctn[(int) code]);
8593 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8594 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
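/* Illustrative shape of the set/compare/jump/set fallback emitted
   above (a source-level sketch with INVERT clear; the compiler emits
   the RTL directly).  For `target = (a < b)':  */
#if 0
static int
store_flag_fallback (int a, int b)
{
  int target = 1;       /* emit_move_insn (target, const1_rtx)  */
  if (a < b)            /* compare_from_rtx + bcc_gen_fctn jump  */
    goto done;
  target = 0;           /* emit_move_insn (target, const0_rtx)  */
 done:
  return target;        /* emit_label (label); return target    */
}
#endif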
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
8608 /* If the machine does not have a case insn that compares the bounds,
8609 this means extra overhead for dispatch tables, which raises the
8610 threshold for using them. */
8611 #ifndef CASE_VALUES_THRESHOLD
8612 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8613 #endif /* CASE_VALUES_THRESHOLD */
8616 case_values_threshold (void)
8618 return CASE_VALUES_THRESHOLD;
8621 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8622 0 otherwise (i.e. if there is no casesi instruction). */
8624 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8625 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8627 enum machine_mode index_mode = SImode;
8628 int index_bits = GET_MODE_BITSIZE (index_mode);
8629 rtx op1, op2, index;
8630 enum machine_mode op_mode;
8635 /* Convert the index to SImode. */
8636 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8638 enum machine_mode omode = TYPE_MODE (index_type);
8639 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8641 /* We must handle the endpoints in the original mode. */
8642 index_expr = build2 (MINUS_EXPR, index_type,
8643 index_expr, minval);
8644 minval = integer_zero_node;
8645 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8646 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8647 omode, 1, default_label);
8648 /* Now we can safely truncate. */
8649 index = convert_to_mode (index_mode, index, 0);
8653 if (TYPE_MODE (index_type) != index_mode)
8655 index_expr = convert (lang_hooks.types.type_for_size
8656 (index_bits, 0), index_expr);
8657 index_type = TREE_TYPE (index_expr);
8660 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8663 do_pending_stack_adjust ();
8665 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
    (index, op_mode))
8668 index = copy_to_mode_reg (op_mode, index);
8670 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8672 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8673 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8674 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
    (op1, op_mode))
8677 op1 = copy_to_mode_reg (op_mode, op1);
8679 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8681 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8682 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8683 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
    (op2, op_mode))
8686 op2 = copy_to_mode_reg (op_mode, op2);
8688 emit_jump_insn (gen_casesi (index, op1, op2,
8689 table_label, default_label));
8693 /* Attempt to generate a tablejump instruction; same concept. */
8694 #ifndef HAVE_tablejump
8695 #define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
8699 /* Subroutine of the next function.
8701 INDEX is the value being switched on, with the lowest value
8702 in the table already subtracted.
8703 MODE is its expected mode (needed if INDEX is constant).
8704 RANGE is the length of the jump table.
8705 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8707 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8708 index value is out of range. */
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;
8716 if (INTVAL (range) > cfun->max_jumptable_ents)
8717 cfun->max_jumptable_ents = INTVAL (range);
8719 /* Do an unsigned comparison (in the proper mode) between the index
8720 expression and the value which represents the length of the range.
8721 Since we just finished subtracting the lower bound of the range
8722 from the index expression, this comparison allows us to simultaneously
8723 check that the original index expression value is both greater than
8724 or equal to the minimum value of the range and less than or equal to
8725 the maximum value of the range. */
emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			 default_label);
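/* Illustrative sketch of the single-comparison bounds check above.
   After the low bound has been subtracted, one unsigned comparison
   tests both ends of the case range (hypothetical helper):  */
#if 0
static int
in_case_range (int x, int lo, int hi)
{
  /* lo <= x && x <= hi  iff  (unsigned) (x - lo) <= (unsigned) (hi - lo);
     values below LO wrap around to large unsigned numbers.  */
  return (unsigned int) (x - lo) <= (unsigned int) (hi - lo);
}
#endif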
8730 /* If index is in range, it must fit in Pmode.
8731 Convert to Pmode so we can index with it. */
8733 index = convert_to_mode (Pmode, index, 1);
8735 /* Don't let a MEM slip through, because then INDEX that comes
8736 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8737 and break_out_memory_refs will go to work on it and mess it up. */
8738 #ifdef PIC_CASE_VECTOR_ADDRESS
8739 if (flag_pic && !REG_P (index))
index = copy_to_mode_reg (Pmode, index);
#endif
8743 /* If flag_force_addr were to affect this address
8744 it could interfere with the tricky assumptions made
8745 about addresses that contain label-refs,
8746 which may be valid only very near the tablejump itself. */
8747 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8748 GET_MODE_SIZE, because this indicates how large insns are. The other
8749 uses should all be Pmode, because they are addresses. This code
8750 could fail if addresses and insns are not the same size. */
8751 index = gen_rtx_PLUS (Pmode,
8752 gen_rtx_MULT (Pmode, index,
8753 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8754 gen_rtx_LABEL_REF (Pmode, table_label));
8755 #ifdef PIC_CASE_VECTOR_ADDRESS
if (flag_pic)
  index = PIC_CASE_VECTOR_ADDRESS (index);
else
#endif
8760 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8761 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8762 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8763 convert_move (temp, vector, 0);
8765 emit_jump_insn (gen_tablejump (temp, table_label));
8767 /* If we are generating PIC code or if the table is PC-relative, the
8768 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
  emit_barrier ();
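/* Illustrative shape of the dispatch built above (a sketch using the
   GNU computed-goto extension; the compiler emits the equivalent RTL
   address arithmetic and indirect jump directly).  */
#if 0
static void
dispatch (unsigned int index)
{
  static const void *table[] = { &&case0, &&case1, &&case2 };

  /* INDEX was already range-checked against DEFAULT_LABEL above.
     temp = *(table_label + index * entry_size); jump temp.  */
  goto *table[index];

 case0: return;
 case1: return;
 case2: return;
}
#endif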
8774 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8775 rtx table_label, rtx default_label)
if (! HAVE_tablejump)
  return 0;
8782 index_expr = fold (build2 (MINUS_EXPR, index_type,
8783 convert (index_type, index_expr),
8784 convert (index_type, minval)));
8785 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8786 do_pending_stack_adjust ();
8788 do_tablejump (index, TYPE_MODE (index_type),
8789 convert_modes (TYPE_MODE (index_type),
8790 TYPE_MODE (TREE_TYPE (range)),
expand_expr (range, NULL_RTX,
	     VOIDmode, 0),
8793 TYPE_UNSIGNED (TREE_TYPE (range))),
8794 table_label, default_label);
8798 /* Nonzero if the mode is a valid vector mode for this architecture.
8799 This returns nonzero even if there is no hardware support for the
8800 vector mode, but we can emulate with narrower modes. */
8803 vector_mode_valid_p (enum machine_mode mode)
8805 enum mode_class class = GET_MODE_CLASS (mode);
8806 enum machine_mode innermode;
8808 /* Doh! What's going on? */
8809 if (class != MODE_VECTOR_INT
8810 && class != MODE_VECTOR_FLOAT)
8813 /* Hardware support. Woo hoo! */
8814 if (targetm.vector_mode_supported_p (mode))
8817 innermode = GET_MODE_INNER (mode);
/* We should probably return 1 if requesting V4DI when we have no DI
   but do have V2DI, but that case is probably very unlikely.  */
8822 /* If we have support for the inner mode, we can safely emulate it.
We may not have V2DI, but we can emulate with a pair of DIs.  */
8824 return targetm.scalar_mode_supported_p (innermode);
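/* Illustrative sketch of emulating a vector mode with its inner mode
   (hypothetical types; not compiled here).  Without hardware V2DI
   support, a V2DI addition becomes two DImode additions:  */
#if 0
typedef struct { long long e[2]; } v2di_emul;

static v2di_emul
add_v2di (v2di_emul a, v2di_emul b)
{
  v2di_emul r;
  r.e[0] = a.e[0] + b.e[0];  /* one DImode add per element */
  r.e[1] = a.e[1] + b.e[1];
  return r;
}
#endif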
8827 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8829 const_vector_from_tree (tree exp)
8834 enum machine_mode inner, mode;
8836 mode = TYPE_MODE (TREE_TYPE (exp));
8838 if (initializer_zerop (exp))
8839 return CONST0_RTX (mode);
8841 units = GET_MODE_NUNITS (mode);
8842 inner = GET_MODE_INNER (mode);
8844 v = rtvec_alloc (units);
8846 link = TREE_VECTOR_CST_ELTS (exp);
8847 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8849 elt = TREE_VALUE (link);
8851 if (TREE_CODE (elt) == REAL_CST)
RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
						 inner);
else
  RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					 TREE_INT_CST_HIGH (elt),
					 inner);
8860 /* Initialize remaining elements to 0. */
8861 for (; i < units; ++i)
8862 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8864 return gen_rtx_CONST_VECTOR (mode, v);
8866 #include "gt-expr.h"