/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
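/* Illustrative sketch (exposition only, not part of the original file):
   on a target where the stack grows downward, STACK_PUSH_CODE is
   PRE_DEC, so pushing one SImode word expands to RTL of the form

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI <src>))

   i.e. the stack pointer is decremented before the store.  Likewise,
   PUSH_ARGS_REVERSED ends up defined roughly when the stack and the
   argument area grow in opposite directions, in which case arguments
   are pushed last-to-first so they land in memory first-to-last.  */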
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
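/* Worked example of these heuristics (hypothetical numbers): with
   MOVE_RATIO == 3 on a 32-bit target, a word-aligned 6-byte copy costs
   one SImode move plus one HImode move, so move_by_pieces_ninsns
   returns 2 and MOVE_BY_PIECES_P is true; a 64-byte copy would cost
   16 moves, so a movmem pattern or a libcall is preferred instead.  */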
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
		  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
		  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
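/* Illustrative example of the simplest path through convert_move
   (assumed target, not taken from this file): zero-extending a SImode
   register into a DImode register with convert_move (to, from, 1)
   normally emits a single insn of the form

       (set (reg:DI <to>) (zero_extend:DI (reg:SI <from>)))

   when a zero_extendsidi2 pattern exists; otherwise the multiword code
   above moves the low word and fills the remaining words (with zero
   here, or with copies of the sign bit for a signed conversion).  */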
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
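/* For instance, on a host with a 64-bit HOST_WIDE_INT and a target
   whose MOVE_MAX_PIECES is 8, this evaluates to MIN (8, 16) == 8
   bytes (illustrative values; both quantities are configuration
   dependent).  */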
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
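/* Sketch of the mode descent above (hypothetical 32-bit target): for
   a word-aligned 9-byte copy, the first iteration selects SImode and
   move_by_pieces_1 emits two 4-byte moves; max_size then drops to 4,
   the remaining byte is copied with one QImode move, and data.len
   reaches 0, satisfying the assertion.  */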
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
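/* Usage sketch (illustrative): a caller copying a 16-byte aligned
   BLKmode object might write

       emit_block_move (x, y, GEN_INT (16), BLOCK_OP_NORMAL);

   and the expansion above tries move_by_pieces first, then a target
   movmem pattern, then the memcpy libcall, falling back to the QImode
   loop only when libcalls are forbidden.  */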
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);

    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
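/* Shape of the loop emitted above (schematic):

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];   -- one QImode move per iteration
       iter += 1;
     cmp:
       if (iter < size) goto top;

   The bound check is an unsigned LT comparison in ITER's mode.  */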
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
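/* Example of the kind of PARALLEL involved (illustrative): a value
   passed in two 8-byte registers might be described as

       (parallel [(expr_list (reg:DI 100) (const_int 0))
                  (expr_list (reg:DI 101) (const_int 8))])

   gen_group_rtx clones this, replacing each register with a fresh
   pseudo of the same mode while keeping the byte offsets.  */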
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
	dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
	dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
	dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE, shift),
				      tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else
	    {
	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
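/* Worked example (hypothetical ABI): for a 6-byte struct returned in
   one 64-bit register on a big-endian target that pads at the least
   significant end, padding_correction is 64 - 48 = 16, so the first
   extract_bit_field above starts at bit 16 of the register while the
   first store_bit_field starts at bit 0 of the target block.  */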
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
2081 /* Determine whether the LEN bytes generated by CONSTFUN can be
2082 stored to memory using several move instructions. CONSTFUNDATA is
2083 a pointer which will be passed as argument in every CONSTFUN call.
2084 ALIGN is maximum alignment we can assume. Return nonzero if a
2085 call to store_by_pieces should succeed. */
2088 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2089 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2090 void *constfundata, unsigned int align)
2092 unsigned HOST_WIDE_INT l;
2093 unsigned int max_size;
2094 HOST_WIDE_INT offset = 0;
2095 enum machine_mode mode, tmode;
2096 enum insn_code icode;
2103 if (! STORE_BY_PIECES_P (len, align))
2106 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2107 if (align >= GET_MODE_ALIGNMENT (tmode))
2108 align = GET_MODE_ALIGNMENT (tmode);
2111 enum machine_mode xmode;
2113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2115 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2116 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2117 || SLOW_UNALIGNED_ACCESS (tmode, align))
2120 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2123 /* We would first store what we can in the largest integer mode, then go to
2124 successively smaller modes. */
2127 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2132 max_size = STORE_MAX_PIECES + 1;
2133 while (max_size > 1)
2135 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2136 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2137 if (GET_MODE_SIZE (tmode) < max_size)
2140 if (mode == VOIDmode)
2143 icode = mov_optab->handlers[(int) mode].insn_code;
2144 if (icode != CODE_FOR_nothing
2145 && align >= GET_MODE_ALIGNMENT (mode))
2147 unsigned int size = GET_MODE_SIZE (mode);
2154 cst = (*constfun) (constfundata, offset, mode);
2155 if (!LEGITIMATE_CONSTANT_P (cst))
2165 max_size = GET_MODE_SIZE (mode);
2168 /* The code above should have handled everything. */
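/* Sketch of a typical CONSTFUN (illustrative; the helper name is
   hypothetical, modeled on the string case in builtins.c):

     static rtx
     example_str_cst (void *data, HOST_WIDE_INT offset,
                      enum machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   A caller first checks can_store_by_pieces (len, example_str_cst, str,
   align) and only on success calls store_by_pieces with the same
   arguments.  */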
2175 /* Generate several move instructions to store LEN bytes generated by
2176 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2177 pointer which will be passed as argument in every CONSTFUN call.
2178 ALIGN is maximum alignment we can assume.
2179    If ENDP is 0 return TO; if ENDP is 1 return memory at the end ala
2180    mempcpy; and if ENDP is 2 return memory at the end minus one byte ala stpcpy.  */
2184 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2185 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2186 void *constfundata, unsigned int align, int endp)
2188 struct store_by_pieces data;
2192 gcc_assert (endp != 2);
2196 gcc_assert (STORE_BY_PIECES_P (len, align));
2197 data.constfun = constfun;
2198 data.constfundata = constfundata;
2201 store_by_pieces_1 (&data, align);
2206 gcc_assert (!data.reverse);
2211 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2212 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2214 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2217 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2224 to1 = adjust_address (data.to, QImode, data.offset);
2232 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2233 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2236 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2238 struct store_by_pieces data;
2243 data.constfun = clear_by_pieces_1;
2244 data.constfundata = NULL;
2247 store_by_pieces_1 (&data, align);
2250 /* Callback routine for clear_by_pieces.
2251 Return const0_rtx unconditionally. */
2254 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2255 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2256 enum machine_mode mode ATTRIBUTE_UNUSED)
2261 /* Subroutine of clear_by_pieces and store_by_pieces.
2262 Generate several move instructions to store LEN bytes of block TO. (A MEM
2263 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2266 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2267 unsigned int align ATTRIBUTE_UNUSED)
2269 rtx to_addr = XEXP (data->to, 0);
2270 unsigned int max_size = STORE_MAX_PIECES + 1;
2271 enum machine_mode mode = VOIDmode, tmode;
2272 enum insn_code icode;
2275 data->to_addr = to_addr;
2277 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2278 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2280 data->explicit_inc_to = 0;
2282 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2284 data->offset = data->len;
2286 /* If storing requires more than two move insns,
2287 copy addresses to registers (to make displacements shorter)
2288 and use post-increment if available. */
2289 if (!data->autinc_to
2290 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2292 /* Determine the main mode we'll be using. */
2293 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2294 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2295 if (GET_MODE_SIZE (tmode) < max_size)
2298 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2300 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2301 data->autinc_to = 1;
2302 data->explicit_inc_to = -1;
2305 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2306 && ! data->autinc_to)
2308 data->to_addr = copy_addr_to_reg (to_addr);
2309 data->autinc_to = 1;
2310 data->explicit_inc_to = 1;
2313   if (!data->autinc_to && CONSTANT_P (to_addr))
2314 data->to_addr = copy_addr_to_reg (to_addr);
2317 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2318 if (align >= GET_MODE_ALIGNMENT (tmode))
2319 align = GET_MODE_ALIGNMENT (tmode);
2322 enum machine_mode xmode;
2324 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2326 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2327 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2328 || SLOW_UNALIGNED_ACCESS (tmode, align))
2331 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2334 /* First store what we can in the largest integer mode, then go to
2335 successively smaller modes. */
2337 while (max_size > 1)
2339 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2340 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2341 if (GET_MODE_SIZE (tmode) < max_size)
2344 if (mode == VOIDmode)
2347 icode = mov_optab->handlers[(int) mode].insn_code;
2348 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2349 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2351 max_size = GET_MODE_SIZE (mode);
2354 /* The code above should have handled everything. */
2355 gcc_assert (!data->len);
2358 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2359 with move instructions for mode MODE. GENFUN is the gen_... function
2360 to make a move insn for that mode. DATA has all the other info. */
2363 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2364 struct store_by_pieces *data)
2366 unsigned int size = GET_MODE_SIZE (mode);
2369 while (data->len >= size)
2372 data->offset -= size;
2374 if (data->autinc_to)
2375 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2378 to1 = adjust_address (data->to, mode, data->offset);
2380 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2381 emit_insn (gen_add2_insn (data->to_addr,
2382 GEN_INT (-(HOST_WIDE_INT) size)));
2384 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2385 emit_insn ((*genfun) (to1, cst));
2387 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2388 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2390 if (! data->reverse)
2391 data->offset += size;
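/* For example (illustrative): with SImode pieces and explicit
   post-increment, each iteration stores through the address register and
   then emits

     (set (reg addr) (plus (reg addr) (const_int 4)))

   via gen_add2_insn; with a genuine {PRE,POST}_{INC,DEC} address the
   adjustment is instead folded into the MEM address itself.  */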
2397 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2398 its length in bytes. */
2401 clear_storage (rtx object, rtx size)
2404 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2405 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2407 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2408 just move a zero. Otherwise, do this a piece at a time. */
2409 if (GET_MODE (object) != BLKmode
2410 && GET_CODE (size) == CONST_INT
2411 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2412 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2415 if (size == const0_rtx)
2417 else if (GET_CODE (size) == CONST_INT
2418 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2419 clear_by_pieces (object, INTVAL (size), align);
2420 else if (clear_storage_via_clrmem (object, size, align))
2423 retval = clear_storage_via_libcall (object, size);
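/* Dispatch example (illustrative; BUF is a hypothetical BLKmode MEM):

     clear_storage (buf, GEN_INT (32));

   tries, in order: a single zero move (non-BLKmode objects only),
   clear_by_pieces when CLEAR_BY_PIECES_P approves the size and alignment,
   the clrmem insn pattern, and finally the memset libcall.  */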
2429 /* A subroutine of clear_storage. Expand a clrmem pattern;
2430 return true if successful. */
2433 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2435 /* Try the most limited insn first, because there's no point
2436 including more than one in the machine description unless
2437 the more limited one has some advantage. */
2439 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2440 enum machine_mode mode;
2442 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2443 mode = GET_MODE_WIDER_MODE (mode))
2445 enum insn_code code = clrmem_optab[(int) mode];
2446 insn_operand_predicate_fn pred;
2448 if (code != CODE_FOR_nothing
2449 /* We don't need MODE to be narrower than
2450 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2451 the mode mask, as it is returned by the macro, it will
2452 definitely be less than the actual mode mask. */
2453 && ((GET_CODE (size) == CONST_INT
2454 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2455 <= (GET_MODE_MASK (mode) >> 1)))
2456 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2457 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2458 || (*pred) (object, BLKmode))
2459 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2460 || (*pred) (opalign, VOIDmode)))
2463 rtx last = get_last_insn ();
2466 op1 = convert_to_mode (mode, size, 1);
2467 pred = insn_data[(int) code].operand[1].predicate;
2468 if (pred != 0 && ! (*pred) (op1, mode))
2469 op1 = copy_to_mode_reg (mode, op1);
2471 pat = GEN_FCN ((int) code) (object, op1, opalign);
2478 delete_insns_since (last);
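/* The emit-or-undo idiom above, in isolation (illustrative sketch):

     rtx last = get_last_insn ();
     rtx pat = GEN_FCN ((int) code) (object, op1, opalign);
     if (pat)
       emit_insn (pat);
     else
       delete_insns_since (last);

   A failing expander yields no pattern, and deleting back to LAST leaves
   the instruction stream exactly as it was before the attempt.  */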
2485 /* A subroutine of clear_storage. Expand a call to memset.
2486 Return the return value of memset, 0 otherwise. */
2489 clear_storage_via_libcall (rtx object, rtx size)
2491 tree call_expr, arg_list, fn, object_tree, size_tree;
2492 enum machine_mode size_mode;
2495 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2496      place those new pseudos into a VAR_DECL and use them later.  */
2498 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2500 size_mode = TYPE_MODE (sizetype);
2501 size = convert_to_mode (size_mode, size, 1);
2502 size = copy_to_mode_reg (size_mode, size);
2504 /* It is incorrect to use the libcall calling conventions to call
2505 memset in this context. This could be a user call to memset and
2506 the user may wish to examine the return value from memset. For
2507 targets where libcalls and normal calls have different conventions
2508 for returning pointers, we could end up generating incorrect code. */
2510 object_tree = make_tree (ptr_type_node, object);
2511 size_tree = make_tree (sizetype, size);
2513 fn = clear_storage_libcall_fn (true);
2514 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2515 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2516 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2518 /* Now we have to build up the CALL_EXPR itself. */
2519 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2520 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2521 call_expr, arg_list, NULL_TREE);
2523 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
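/* The CALL_EXPR assembled above corresponds to the C call (illustrative)

     memset (object, 0, size);

   expanded through the normal call machinery, so the target's ordinary
   conventions for pointer-returning functions apply, not the libcall
   ones.  */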
2528 /* A subroutine of clear_storage_via_libcall. Create the tree node
2529 for the function we use for block clears. The first time FOR_CALL
2530 is true, we call assemble_external. */
2532 static GTY(()) tree block_clear_fn;
2535 init_block_clear_fn (const char *asmspec)
2537 if (!block_clear_fn)
2541 fn = get_identifier ("memset");
2542 args = build_function_type_list (ptr_type_node, ptr_type_node,
2543 integer_type_node, sizetype,
2546 fn = build_decl (FUNCTION_DECL, fn, args);
2547 DECL_EXTERNAL (fn) = 1;
2548 TREE_PUBLIC (fn) = 1;
2549 DECL_ARTIFICIAL (fn) = 1;
2550 TREE_NOTHROW (fn) = 1;
2552 block_clear_fn = fn;
2556 set_user_assembler_name (block_clear_fn, asmspec);
2560 clear_storage_libcall_fn (int for_call)
2562 static bool emitted_extern;
2564 if (!block_clear_fn)
2565 init_block_clear_fn (NULL);
2567 if (for_call && !emitted_extern)
2569 emitted_extern = true;
2570 make_decl_rtl (block_clear_fn);
2571 assemble_external (block_clear_fn);
2574 return block_clear_fn;
2577 /* Write to one of the components of the complex value CPLX. Write VAL to
2578    the real part if IMAG_P is false, and the imaginary part if it's true.  */
2581 write_complex_part (rtx cplx, rtx val, bool imag_p)
2583 enum machine_mode cmode;
2584 enum machine_mode imode;
2587 if (GET_CODE (cplx) == CONCAT)
2589 emit_move_insn (XEXP (cplx, imag_p), val);
2593 cmode = GET_MODE (cplx);
2594 imode = GET_MODE_INNER (cmode);
2595 ibitsize = GET_MODE_BITSIZE (imode);
2597 /* If the sub-object is at least word sized, then we know that subregging
2598 will work. This special case is important, since store_bit_field
2599 wants to operate on integer modes, and there's rarely an OImode to
2600 correspond to TCmode. */
2601 if (ibitsize >= BITS_PER_WORD)
2603 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2604 imag_p ? GET_MODE_SIZE (imode) : 0);
2605 emit_move_insn (part, val);
2608 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
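/* Example (illustrative): for an SCmode value on a target with 64-bit
   words, the 32-bit imaginary part falls through to

     store_bit_field (cplx, 32, 32, SFmode, val);

   whereas a CONCAT, or a part of at least word size (e.g. SCmode with
   32-bit words), takes one of the cheaper paths above.  */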
2611 /* Extract one of the components of the complex value CPLX. Extract the
2612 real part if IMAG_P is false, and the imaginary part if it's true. */
2615 read_complex_part (rtx cplx, bool imag_p)
2617 enum machine_mode cmode, imode;
2620 if (GET_CODE (cplx) == CONCAT)
2621 return XEXP (cplx, imag_p);
2623 cmode = GET_MODE (cplx);
2624 imode = GET_MODE_INNER (cmode);
2625 ibitsize = GET_MODE_BITSIZE (imode);
2627 /* Special case reads from complex constants that got spilled to memory. */
2628 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2630 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2631 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2633 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2634 if (CONSTANT_CLASS_P (part))
2635 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2639 /* If the sub-object is at least word sized, then we know that subregging
2640 will work. This special case is important, since extract_bit_field
2641 wants to operate on integer modes, and there's rarely an OImode to
2642 correspond to TCmode. */
2643 if (ibitsize >= BITS_PER_WORD)
2645 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2646 imag_p ? GET_MODE_SIZE (imode) : 0);
2647 gcc_assert (ret != NULL);
2651 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2652 true, NULL_RTX, imode, imode);
2655 /* A subroutine of emit_move_via_alt_mode. Yet another lowpart generator.
2656 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2657 represented in NEW_MODE. */
2660 emit_move_change_mode (enum machine_mode new_mode,
2661 enum machine_mode old_mode, rtx x)
2665 if (reload_in_progress && MEM_P (x))
2667 /* We can't use gen_lowpart here because it may call change_address
2668 which is not appropriate if we were called when a reload was in
2669 progress. We don't have to worry about changing the address since
2670 the size in bytes is supposed to be the same. Copy the MEM to
2671 	 change the mode and move any substitutions from the old MEM to the new one.  */
2674 ret = adjust_address_nv (x, new_mode, 0);
2675 copy_replacements (x, ret);
2679 /* Note that we do want simplify_subreg's behaviour of validating
2680 that the new mode is ok for a hard register. If we were to use
2681 simplify_gen_subreg, we would create the subreg, but would
2682 probably run into the target not being able to implement it. */
2683 ret = simplify_subreg (new_mode, x, old_mode, 0);
2689 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2690 ALT_MODE instead of the operand's natural mode, MODE. CODE is the insn
2691 code for the move in ALT_MODE, and is known to be valid. Returns the
2692 instruction emitted, or NULL if X or Y cannot be represented in ALT_MODE. */
2695 emit_move_via_alt_mode (enum machine_mode alt_mode, enum machine_mode mode,
2696 enum insn_code code, rtx x, rtx y)
2698 x = emit_move_change_mode (alt_mode, mode, x);
2701 y = emit_move_change_mode (alt_mode, mode, y);
2704 return emit_insn (GEN_FCN (code) (x, y));
2707 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2708 an integer mode of the same size as MODE. Returns the instruction
2709 emitted, or NULL if such a move could not be generated. */
2712 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2714 enum machine_mode imode;
2715 enum insn_code code;
2717 /* There must exist a mode of the exact size we require. */
2718 imode = int_mode_for_mode (mode);
2719 if (imode == BLKmode)
2722 /* The target must support moves in this mode. */
2723 code = mov_optab->handlers[imode].insn_code;
2724 if (code == CODE_FOR_nothing)
2727 return emit_move_via_alt_mode (imode, mode, code, x, y);
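/* Example (illustrative): on a 32-bit target lacking a movsf pattern,
   int_mode_for_mode (SFmode) is SImode, so

     emit_move_via_integer (SFmode, x, y);

   rewrites both operands into SImode via emit_move_change_mode and
   performs the copy with the SImode move pattern.  */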
2730 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2731 Return an equivalent MEM that does not use an auto-increment. */
2734 emit_move_resolve_push (enum machine_mode mode, rtx x)
2736 enum rtx_code code = GET_CODE (XEXP (x, 0));
2737 HOST_WIDE_INT adjust;
2740 adjust = GET_MODE_SIZE (mode);
2741 #ifdef PUSH_ROUNDING
2742 adjust = PUSH_ROUNDING (adjust);
2744 if (code == PRE_DEC || code == POST_DEC)
2747 /* Do not use anti_adjust_stack, since we don't want to update
2748 stack_pointer_delta. */
2749 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2750 GEN_INT (adjust), stack_pointer_rtx,
2751 0, OPTAB_LIB_WIDEN);
2752 if (temp != stack_pointer_rtx)
2753 emit_move_insn (stack_pointer_rtx, temp);
2759 temp = stack_pointer_rtx;
2762 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2765 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2771 return replace_equiv_address (x, temp);
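/* For instance (illustrative): with a 4-byte MODE and a PRE_DEC push,
   (mem:M (pre_dec sp)) is rewritten as an explicit

     sp = sp + (-4)

   adjustment followed by a plain (mem:M sp), which an ordinary move
   instruction can handle.  */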
2774 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2775 X is known to satisfy push_operand, and MODE is known to be complex.
2776 Returns the last instruction emitted. */
2779 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2781 enum machine_mode submode = GET_MODE_INNER (mode);
2784 #ifdef PUSH_ROUNDING
2785 unsigned int submodesize = GET_MODE_SIZE (submode);
2787 /* In case we output to the stack, but the size is smaller than the
2788 machine can push exactly, we need to use move instructions. */
2789 if (PUSH_ROUNDING (submodesize) != submodesize)
2791 x = emit_move_resolve_push (mode, x);
2792 return emit_move_insn (x, y);
2796 /* Note that the real part always precedes the imag part in memory
2797 regardless of machine's endianness. */
2798 switch (GET_CODE (XEXP (x, 0)))
2812 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2813 read_complex_part (y, imag_first));
2814 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2815 read_complex_part (y, !imag_first));
2818 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2819 MODE is known to be complex. Returns the last instruction emitted. */
2822 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2826 /* Need to take special care for pushes, to maintain proper ordering
2827 of the data, and possibly extra padding. */
2828 if (push_operand (x, mode))
2829 return emit_move_complex_push (mode, x, y);
2831   /* For memory to memory moves, optimal behavior can be had with the
2832 existing block move logic. */
2833 if (MEM_P (x) && MEM_P (y))
2835 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2836 BLOCK_OP_NO_LIBCALL);
2837 return get_last_insn ();
2840 /* See if we can coerce the target into moving both values at once. */
2842 /* Not possible if the values are inherently not adjacent. */
2843 if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2845   /* It is possible if both are registers (or subregs of registers).  */
2846 else if (register_operand (x, mode) && register_operand (y, mode))
2848 /* If one of the operands is a memory, and alignment constraints
2849 are friendly enough, we may be able to do combined memory operations.
2850 We do not attempt this if Y is a constant because that combination is
2851 usually better with the by-parts thing below. */
2852 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2853 && (!STRICT_ALIGNMENT
2854 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2861 rtx ret = emit_move_via_integer (mode, x, y);
2866 /* Show the output dies here. This is necessary for SUBREGs
2867 of pseudos since we cannot track their lifetimes correctly;
2868 hard regs shouldn't appear here except as return values. */
2869 if (!reload_completed && !reload_in_progress
2870 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2871 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2873 write_complex_part (x, read_complex_part (y, false), false);
2874 write_complex_part (x, read_complex_part (y, true), true);
2875 return get_last_insn ();
2878 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2879 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2882 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2886 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
2889 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
2890 if (code != CODE_FOR_nothing)
2891 return emit_move_via_alt_mode (CCmode, mode, code, x, y);
2894 /* Otherwise, find the MODE_INT mode of the same width. */
2895 ret = emit_move_via_integer (mode, x, y);
2896 gcc_assert (ret != NULL);
2900 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2901 MODE is any multi-word or full-word mode that lacks a move_insn
2902 pattern. Note that you will get better code if you define such
2903 patterns, even if they must turn into multiple assembler instructions. */
2906 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
2913 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2915 /* If X is a push on the stack, do the push now and replace
2916 X with a reference to the stack pointer. */
2917 if (push_operand (x, mode))
2918 x = emit_move_resolve_push (mode, x);
2920 /* If we are in reload, see if either operand is a MEM whose address
2921 is scheduled for replacement. */
2922 if (reload_in_progress && MEM_P (x)
2923 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2924 x = replace_equiv_address_nv (x, inner);
2925 if (reload_in_progress && MEM_P (y)
2926 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2927 y = replace_equiv_address_nv (y, inner);
2931 need_clobber = false;
2933 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2936 rtx xpart = operand_subword (x, i, 1, mode);
2937 rtx ypart = operand_subword (y, i, 1, mode);
2939 /* If we can't get a part of Y, put Y into memory if it is a
2940 constant. Otherwise, force it into a register. If we still
2941 can't get a part of Y, abort. */
2942 if (ypart == 0 && CONSTANT_P (y))
2944 y = force_const_mem (mode, y);
2945 ypart = operand_subword (y, i, 1, mode);
2947 else if (ypart == 0)
2948 ypart = operand_subword_force (y, i, mode);
2950 gcc_assert (xpart && ypart);
2952 need_clobber |= (GET_CODE (xpart) == SUBREG);
2954 last_insn = emit_move_insn (xpart, ypart);
2960 /* Show the output dies here. This is necessary for SUBREGs
2961 of pseudos since we cannot track their lifetimes correctly;
2962 hard regs shouldn't appear here except as return values.
2963 We never want to emit such a clobber after reload. */
2965 && ! (reload_in_progress || reload_completed)
2966 && need_clobber != 0)
2967 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
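/* Sketch (illustrative): a DImode move on a 32-bit target without a
   movdi pattern becomes

     emit_move_insn (operand_subword (x, 0, 1, DImode),
                     operand_subword (y, 0, 1, DImode));

   and likewise for word 1, with the CLOBBER of X emitted before the word
   moves when X is a SUBREG of a pseudo, so that liveness does not mistake
   the first partial store for a full definition.  */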
2974 /* Low level part of emit_move_insn.
2975 Called just like emit_move_insn, but assumes X and Y
2976 are basically valid. */
2979 emit_move_insn_1 (rtx x, rtx y)
2981 enum machine_mode mode = GET_MODE (x);
2982 enum insn_code code;
2984 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2986 code = mov_optab->handlers[mode].insn_code;
2987 if (code != CODE_FOR_nothing)
2988 return emit_insn (GEN_FCN (code) (x, y));
2990 /* Expand complex moves by moving real part and imag part. */
2991 if (COMPLEX_MODE_P (mode))
2992 return emit_move_complex (mode, x, y);
2994 if (GET_MODE_CLASS (mode) == MODE_CC)
2995 return emit_move_ccmode (mode, x, y);
2997 /* Try using a move pattern for the corresponding integer mode. This is
2998 only safe when simplify_subreg can convert MODE constants into integer
2999 constants. At present, it can only do this reliably if the value
3000 fits within a HOST_WIDE_INT. */
3001 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3003 rtx ret = emit_move_via_integer (mode, x, y);
3008 return emit_move_multi_word (mode, x, y);
3011 /* Generate code to copy Y into X.
3012 Both Y and X must have the same mode, except that
3013 Y can be a constant with VOIDmode.
3014 This mode cannot be BLKmode; use emit_block_move for that.
3016 Return the last instruction emitted. */
3019 emit_move_insn (rtx x, rtx y)
3021 enum machine_mode mode = GET_MODE (x);
3022 rtx y_cst = NULL_RTX;
3025 gcc_assert (mode != BLKmode
3026 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3031 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3032 && (last_insn = compress_float_constant (x, y)))
3037 if (!LEGITIMATE_CONSTANT_P (y))
3039 y = force_const_mem (mode, y);
3041 /* If the target's cannot_force_const_mem prevented the spill,
3042 assume that the target's move expanders will also take care
3043 of the non-legitimate constant. */
3049   /* If X or Y are memory references, verify that their addresses are valid for the machine.  */
3052 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3053 && ! push_operand (x, GET_MODE (x)))
3055 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3056 x = validize_mem (x);
3059 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3061 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3062 y = validize_mem (y);
3064 gcc_assert (mode != BLKmode);
3066 last_insn = emit_move_insn_1 (x, y);
3068 if (y_cst && REG_P (x)
3069 && (set = single_set (last_insn)) != NULL_RTX
3070 && SET_DEST (set) == x
3071 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3072 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3077 /* If Y is representable exactly in a narrower mode, and the target can
3078 perform the extension directly from constant or memory, then emit the
3079 move as an extension. */
3082 compress_float_constant (rtx x, rtx y)
3084 enum machine_mode dstmode = GET_MODE (x);
3085 enum machine_mode orig_srcmode = GET_MODE (y);
3086 enum machine_mode srcmode;
3089 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3091 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3092 srcmode != orig_srcmode;
3093 srcmode = GET_MODE_WIDER_MODE (srcmode))
3096 rtx trunc_y, last_insn;
3098 /* Skip if the target can't extend this way. */
3099 ic = can_extend_p (dstmode, srcmode, 0);
3100 if (ic == CODE_FOR_nothing)
3103 /* Skip if the narrowed value isn't exact. */
3104 if (! exact_real_truncate (srcmode, &r))
3107 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3109 if (LEGITIMATE_CONSTANT_P (trunc_y))
3111 	  /* Skip if the target needs extra instructions to perform the extension.  */
3113 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3116 else if (float_extend_from_mem[dstmode][srcmode])
3117 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3121 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3122 last_insn = get_last_insn ();
3125 set_unique_reg_note (last_insn, REG_EQUAL, y);
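/* Worked example (illustrative): moving the DFmode constant 1.0 into a
   register on a target with an extendsfdf2 pattern can be emitted as

     (set (reg:DF x) (float_extend:DF (mem/u:SF ...)))

   because 1.0 survives exact_real_truncate to SFmode; a constant such as
   0.1 rounds when narrowed and is therefore skipped.  */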
3133 /* Pushing data onto the stack. */
3135 /* Push a block of length SIZE (perhaps variable)
3136 and return an rtx to address the beginning of the block.
3137 The value may be virtual_outgoing_args_rtx.
3139 EXTRA is the number of bytes of padding to push in addition to SIZE.
3140 BELOW nonzero means this padding comes at low addresses;
3141 otherwise, the padding comes at high addresses. */
3144 push_block (rtx size, int extra, int below)
3148 size = convert_modes (Pmode, ptr_mode, size, 1);
3149 if (CONSTANT_P (size))
3150 anti_adjust_stack (plus_constant (size, extra));
3151 else if (REG_P (size) && extra == 0)
3152 anti_adjust_stack (size);
3155 temp = copy_to_mode_reg (Pmode, size);
3157 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3158 temp, 0, OPTAB_LIB_WIDEN);
3159 anti_adjust_stack (temp);
3162 #ifndef STACK_GROWS_DOWNWARD
3168 temp = virtual_outgoing_args_rtx;
3169 if (extra != 0 && below)
3170 temp = plus_constant (temp, extra);
3174 if (GET_CODE (size) == CONST_INT)
3175 temp = plus_constant (virtual_outgoing_args_rtx,
3176 -INTVAL (size) - (below ? 0 : extra));
3177 else if (extra != 0 && !below)
3178 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3179 negate_rtx (Pmode, plus_constant (size, extra)));
3181 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3182 negate_rtx (Pmode, size));
3185 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3188 #ifdef PUSH_ROUNDING
3190 /* Emit single push insn. */
3193 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3196 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3198 enum insn_code icode;
3199 insn_operand_predicate_fn pred;
3201 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3202   /* If there is a push pattern, use it.  Otherwise try the old way of
3203      throwing a MEM representing the push operation to the move expander.  */
3204 icode = push_optab->handlers[(int) mode].insn_code;
3205 if (icode != CODE_FOR_nothing)
3207 if (((pred = insn_data[(int) icode].operand[0].predicate)
3208 && !((*pred) (x, mode))))
3209 x = force_reg (mode, x);
3210 emit_insn (GEN_FCN (icode) (x));
3213 if (GET_MODE_SIZE (mode) == rounded_size)
3214 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3215 /* If we are to pad downward, adjust the stack pointer first and
3216 then store X into the stack location using an offset. This is
3217      because emit_move_insn does not know how to pad; it does not have access to the type.  */
3219 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3221 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3222 HOST_WIDE_INT offset;
3224 emit_move_insn (stack_pointer_rtx,
3225 expand_binop (Pmode,
3226 #ifdef STACK_GROWS_DOWNWARD
3232 GEN_INT (rounded_size),
3233 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3235 offset = (HOST_WIDE_INT) padding_size;
3236 #ifdef STACK_GROWS_DOWNWARD
3237 if (STACK_PUSH_CODE == POST_DEC)
3238 	/* We have already decremented the stack pointer, so get the previous value.  */
3240 offset += (HOST_WIDE_INT) rounded_size;
3242 if (STACK_PUSH_CODE == POST_INC)
3243 	/* We have already incremented the stack pointer, so get the previous value.  */
3245 offset -= (HOST_WIDE_INT) rounded_size;
3247 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3251 #ifdef STACK_GROWS_DOWNWARD
3252 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3253 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3254 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3256 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3257 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3258 GEN_INT (rounded_size));
3260 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3263 dest = gen_rtx_MEM (mode, dest_addr);
3267 set_mem_attributes (dest, type, 1);
3269 if (flag_optimize_sibling_calls)
3270 /* Function incoming arguments may overlap with sibling call
3271 outgoing arguments and we cannot allow reordering of reads
3272 from function arguments with stores to outgoing arguments
3273 of sibling calls. */
3274 set_mem_alias_set (dest, 0);
3276 emit_move_insn (dest, x);
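/* Example (illustrative): pushing an HImode value when PUSH_ROUNDING
   rounds 2 bytes up to 4 and FUNCTION_ARG_PADDING says downward: the
   stack pointer is first moved by the full 4 bytes, and the 2 data bytes
   are then stored at offset padding_size == 2 from the new stack pointer,
   leaving the padding below the data.  */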
3280 /* Generate code to push X onto the stack, assuming it has mode MODE and
3282    MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3284 SIZE is an rtx for the size of data to be copied (in bytes),
3285 needed only if X is BLKmode.
3287 ALIGN (in bits) is maximum alignment we can assume.
3289 If PARTIAL and REG are both nonzero, then copy that many of the first
3290 words of X into registers starting with REG, and push the rest of X.
3291 The amount of space pushed is decreased by PARTIAL words,
3292 rounded *down* to a multiple of PARM_BOUNDARY.
3293 REG must be a hard register in this case.
3294    If REG is zero but PARTIAL is not, take all other actions for an
3295    argument partially in registers, but do not actually load any registers.
3298 EXTRA is the amount in bytes of extra space to leave next to this arg.
3299 This is ignored if an argument block has already been allocated.
3301 On a machine that lacks real push insns, ARGS_ADDR is the address of
3302 the bottom of the argument block for this call. We use indexing off there
3303    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3304 argument block has not been preallocated.
3306 ARGS_SO_FAR is the size of args previously pushed for this call.
3308 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3309 for arguments passed in registers. If nonzero, it will be the number
3310 of bytes required. */
3313 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3314 unsigned int align, int partial, rtx reg, int extra,
3315 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3319 enum direction stack_direction
3320 #ifdef STACK_GROWS_DOWNWARD
3326 /* Decide where to pad the argument: `downward' for below,
3327 `upward' for above, or `none' for don't pad it.
3328 Default is below for small data on big-endian machines; else above. */
3329 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3331   /* Invert direction if stack is post-decrement.  */
3333 if (STACK_PUSH_CODE == POST_DEC)
3334 if (where_pad != none)
3335 where_pad = (where_pad == downward ? upward : downward);
3339 if (mode == BLKmode)
3341 /* Copy a block into the stack, entirely or partially. */
3344 int used = partial * UNITS_PER_WORD;
3348 if (reg && GET_CODE (reg) == PARALLEL)
3350 /* Use the size of the elt to compute offset. */
3351 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3352 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3353 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3356 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3362 /* USED is now the # of bytes we need not copy to the stack
3363 because registers will take care of them. */
3366 xinner = adjust_address (xinner, BLKmode, used);
3368 /* If the partial register-part of the arg counts in its stack size,
3369 skip the part of stack space corresponding to the registers.
3370 Otherwise, start copying to the beginning of the stack space,
3371 by setting SKIP to 0. */
3372 skip = (reg_parm_stack_space == 0) ? 0 : used;
3374 #ifdef PUSH_ROUNDING
3375 /* Do it with several push insns if that doesn't take lots of insns
3376 and if there is no difficulty with push insns that skip bytes
3377 on the stack for alignment purposes. */
3380 && GET_CODE (size) == CONST_INT
3382 && MEM_ALIGN (xinner) >= align
3383 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3384 /* Here we avoid the case of a structure whose weak alignment
3385 forces many pushes of a small amount of data,
3386 and such small pushes do rounding that causes trouble. */
3387 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3388 || align >= BIGGEST_ALIGNMENT
3389 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3390 == (align / BITS_PER_UNIT)))
3391 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3393 /* Push padding now if padding above and stack grows down,
3394 or if padding below and stack grows up.
3395 But if space already allocated, this has already been done. */
3396 if (extra && args_addr == 0
3397 && where_pad != none && where_pad != stack_direction)
3398 anti_adjust_stack (GEN_INT (extra));
3400 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3403 #endif /* PUSH_ROUNDING */
3407 /* Otherwise make space on the stack and copy the data
3408 to the address of that space. */
3410 /* Deduct words put into registers from the size we must copy. */
3413 if (GET_CODE (size) == CONST_INT)
3414 size = GEN_INT (INTVAL (size) - used);
3416 size = expand_binop (GET_MODE (size), sub_optab, size,
3417 GEN_INT (used), NULL_RTX, 0,
3421 /* Get the address of the stack space.
3422 In this case, we do not deal with EXTRA separately.
3423 A single stack adjust will do. */
3426 temp = push_block (size, extra, where_pad == downward);
3429 else if (GET_CODE (args_so_far) == CONST_INT)
3430 temp = memory_address (BLKmode,
3431 plus_constant (args_addr,
3432 skip + INTVAL (args_so_far)));
3434 temp = memory_address (BLKmode,
3435 plus_constant (gen_rtx_PLUS (Pmode,
3440 if (!ACCUMULATE_OUTGOING_ARGS)
3442 /* If the source is referenced relative to the stack pointer,
3443 copy it to another register to stabilize it. We do not need
3444 	     to do this if we know that we won't be changing sp.  */
3446 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3447 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3448 temp = copy_to_reg (temp);
3451 target = gen_rtx_MEM (BLKmode, temp);
3453 /* We do *not* set_mem_attributes here, because incoming arguments
3454 may overlap with sibling call outgoing arguments and we cannot
3455 allow reordering of reads from function arguments with stores
3456 to outgoing arguments of sibling calls. We do, however, want
3457 to record the alignment of the stack slot. */
3458 /* ALIGN may well be better aligned than TYPE, e.g. due to
3459 PARM_BOUNDARY. Assume the caller isn't lying. */
3460 set_mem_align (target, align);
3462 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3465 else if (partial > 0)
3467 /* Scalar partly in registers. */
3469 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3472 /* # words of start of argument
3473 that we must make space for but need not store. */
3474 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3475 int args_offset = INTVAL (args_so_far);
3478 /* Push padding now if padding above and stack grows down,
3479 or if padding below and stack grows up.
3480 But if space already allocated, this has already been done. */
3481 if (extra && args_addr == 0
3482 && where_pad != none && where_pad != stack_direction)
3483 anti_adjust_stack (GEN_INT (extra));
3485 /* If we make space by pushing it, we might as well push
3486 the real data. Otherwise, we can leave OFFSET nonzero
3487 and leave the space uninitialized. */
3491 /* Now NOT_STACK gets the number of words that we don't need to
3492 allocate on the stack. */
3493 not_stack = partial - offset;
3495 /* If the partial register-part of the arg counts in its stack size,
3496 skip the part of stack space corresponding to the registers.
3497 Otherwise, start copying to the beginning of the stack space,
3498 by setting SKIP to 0. */
3499 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3501 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3502 x = validize_mem (force_const_mem (mode, x));
3504 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3505 SUBREGs of such registers are not allowed. */
3506 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3507 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3508 x = copy_to_reg (x);
3510 /* Loop over all the words allocated on the stack for this arg. */
3511 /* We can do it by words, because any scalar bigger than a word
3512 has a size a multiple of a word. */
3513 #ifndef PUSH_ARGS_REVERSED
3514 for (i = not_stack; i < size; i++)
3516 for (i = size - 1; i >= not_stack; i--)
3518 if (i >= not_stack + offset)
3519 emit_push_insn (operand_subword_force (x, i, mode),
3520 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3522 GEN_INT (args_offset + ((i - not_stack + skip)
3524 reg_parm_stack_space, alignment_pad);
3531 /* Push padding now if padding above and stack grows down,
3532 or if padding below and stack grows up.
3533 But if space already allocated, this has already been done. */
3534 if (extra && args_addr == 0
3535 && where_pad != none && where_pad != stack_direction)
3536 anti_adjust_stack (GEN_INT (extra));
3538 #ifdef PUSH_ROUNDING
3539 if (args_addr == 0 && PUSH_ARGS)
3540 emit_single_push_insn (mode, x, type);
3544 if (GET_CODE (args_so_far) == CONST_INT)
3546 = memory_address (mode,
3547 plus_constant (args_addr,
3548 INTVAL (args_so_far)));
3550 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3552 dest = gen_rtx_MEM (mode, addr);
3554 /* We do *not* set_mem_attributes here, because incoming arguments
3555 may overlap with sibling call outgoing arguments and we cannot
3556 allow reordering of reads from function arguments with stores
3557 to outgoing arguments of sibling calls. We do, however, want
3558 to record the alignment of the stack slot. */
3559 /* ALIGN may well be better aligned than TYPE, e.g. due to
3560 PARM_BOUNDARY. Assume the caller isn't lying. */
3561 set_mem_align (dest, align);
3563 emit_move_insn (dest, x);
3567 /* If part should go in registers, copy that part
3568 into the appropriate registers. Do this now, at the end,
3569 since mem-to-mem copies above may do function calls. */
3570 if (partial > 0 && reg != 0)
3572 /* Handle calls that pass values in multiple non-contiguous locations.
3573 The Irix 6 ABI has examples of this. */
3574 if (GET_CODE (reg) == PARALLEL)
3575 emit_group_load (reg, x, type, -1);
3577 move_block_to_reg (REGNO (reg), x, partial, mode);
3580 if (extra && args_addr == 0 && where_pad == stack_direction)
3581 anti_adjust_stack (GEN_INT (extra));
3583 if (alignment_pad && args_addr == 0)
3584 anti_adjust_stack (alignment_pad);
3587 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations.  */
3591 get_subtarget (rtx x)
3595 /* Only registers can be subtargets. */
3597 /* Don't use hard regs to avoid extending their life. */
3598 || REGNO (x) < FIRST_PSEUDO_REGISTER
3602 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3603 FIELD is a bitfield. Returns true if the optimization was successful,
3604 and there's nothing else to do. */
3607 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3608 unsigned HOST_WIDE_INT bitpos,
3609 enum machine_mode mode1, rtx str_rtx,
3612 enum machine_mode str_mode = GET_MODE (str_rtx);
3613 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3618 if (mode1 != VOIDmode
3619 || bitsize >= BITS_PER_WORD
3620 || str_bitsize > BITS_PER_WORD
3621 || TREE_SIDE_EFFECTS (to)
3622 || TREE_THIS_VOLATILE (to))
3626 if (!BINARY_CLASS_P (src)
3627 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3630 op0 = TREE_OPERAND (src, 0);
3631 op1 = TREE_OPERAND (src, 1);
3634 if (!operand_equal_p (to, op0, 0))
3637 if (MEM_P (str_rtx))
3639 unsigned HOST_WIDE_INT offset1;
3641 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3642 str_mode = word_mode;
3643 str_mode = get_best_mode (bitsize, bitpos,
3644 MEM_ALIGN (str_rtx), str_mode, 0);
3645 if (str_mode == VOIDmode)
3647 str_bitsize = GET_MODE_BITSIZE (str_mode);
3650 bitpos %= str_bitsize;
3651 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3652 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3654 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3657 /* If the bit field covers the whole REG/MEM, store_field
3658 will likely generate better code. */
3659 if (bitsize >= str_bitsize)
3662 /* We can't handle fields split across multiple entities. */
3663 if (bitpos + bitsize > str_bitsize)
3666 if (BYTES_BIG_ENDIAN)
3667 bitpos = str_bitsize - bitpos - bitsize;
3669 switch (TREE_CODE (src))
3673 /* For now, just optimize the case of the topmost bitfield
3674 where we don't need to do any masking and also
3675 1 bit bitfields where xor can be used.
3676 We might win by one instruction for the other bitfields
3677 too if insv/extv instructions aren't used, so that
3678 can be added later. */
3679 if (bitpos + bitsize != str_bitsize
3680 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3683 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3684 value = convert_modes (str_mode,
3685 TYPE_MODE (TREE_TYPE (op1)), value,
3686 TYPE_UNSIGNED (TREE_TYPE (op1)));
3688 /* We may be accessing data outside the field, which means
3689 we can alias adjacent data. */
3690 if (MEM_P (str_rtx))
3692 str_rtx = shallow_copy_rtx (str_rtx);
3693 set_mem_alias_set (str_rtx, 0);
3694 set_mem_expr (str_rtx, 0);
3697 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3698 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3700 value = expand_and (str_mode, value, const1_rtx, NULL);
3703 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3704 build_int_cst (NULL_TREE, bitpos),
3706 result = expand_binop (str_mode, binop, str_rtx,
3707 value, str_rtx, 1, OPTAB_WIDEN);
3708 if (result != str_rtx)
3709 emit_move_insn (str_rtx, result);
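/* Worked example (illustrative):

     struct { unsigned hi : 3; } s;
     s.hi += 1;

   when HI occupies the topmost bits of its containing word, the += is
   compiled as a single full-word add of 1 << bitpos: carries out of the
   top of the bitfield simply fall off the word, so no masking is
   needed.  */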
3720 /* Expand an assignment that stores the value of FROM into TO. */
3723 expand_assignment (tree to, tree from)
3728 /* Don't crash if the lhs of the assignment was erroneous. */
3730 if (TREE_CODE (to) == ERROR_MARK)
3732 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3736 /* Assignment of a structure component needs special treatment
3737 if the structure component's rtx is not simply a MEM.
3738 Assignment of an array element at a constant index, and assignment of
3739      an array element in an unaligned packed structure field, has the same problem.  */
3741 if (handled_component_p (to)
3742 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3744 enum machine_mode mode1;
3745 HOST_WIDE_INT bitsize, bitpos;
3753 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3754 &unsignedp, &volatilep);
3756 /* If we are going to use store_bit_field and extract_bit_field,
3757 make sure to_rtx will be safe for multiple use. */
3759 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3763 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3765 gcc_assert (MEM_P (to_rtx));
3767 #ifdef POINTERS_EXTEND_UNSIGNED
3768 if (GET_MODE (offset_rtx) != Pmode)
3769 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3771 if (GET_MODE (offset_rtx) != ptr_mode)
3772 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3775 /* A constant address in TO_RTX can have VOIDmode, we must not try
3776 to call force_reg for that case. Avoid that case. */
3778 && GET_MODE (to_rtx) == BLKmode
3779 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3781 && (bitpos % bitsize) == 0
3782 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3783 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3785 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3789 to_rtx = offset_address (to_rtx, offset_rtx,
3790 highest_pow2_factor_for_target (to,
3794 /* Handle expand_expr of a complex value returning a CONCAT. */
3795 if (GET_CODE (to_rtx) == CONCAT)
3797 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3799 gcc_assert (bitpos == 0);
3800 result = store_expr (from, to_rtx, false);
3804 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3805 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3812 /* If the field is at offset zero, we could have been given the
3813 DECL_RTX of the parent struct. Don't munge it. */
3814 to_rtx = shallow_copy_rtx (to_rtx);
3816 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3818 /* Deal with volatile and readonly fields. The former is only
3819 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3821 MEM_VOLATILE_P (to_rtx) = 1;
3822 if (component_uses_parent_alias_set (to))
3823 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3826 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3830 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3831 TREE_TYPE (tem), get_alias_set (to));
3835 preserve_temp_slots (result);
3841 /* If the rhs is a function call and its value is not an aggregate,
3842 call the function before we start to compute the lhs.
3843 This is needed for correct code for cases such as
3844 val = setjmp (buf) on machines where reference to val
3845 requires loading up part of an address in a separate insn.
3847 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3848 since it might be a promoted variable where the zero- or sign- extension
3849 needs to be done. Handling this in the normal way is safe because no
3850 computation is done before the call. */
3851 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3852 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3853 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3854 && REG_P (DECL_RTL (to))))
3859 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3861 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3863 /* Handle calls that return values in multiple non-contiguous locations.
3864 The Irix 6 ABI has examples of this. */
3865 if (GET_CODE (to_rtx) == PARALLEL)
3866 emit_group_load (to_rtx, value, TREE_TYPE (from),
3867 int_size_in_bytes (TREE_TYPE (from)));
3868 else if (GET_MODE (to_rtx) == BLKmode)
3869 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3872 if (POINTER_TYPE_P (TREE_TYPE (to)))
3873 value = convert_memory_address (GET_MODE (to_rtx), value);
3874 emit_move_insn (to_rtx, value);
3876 preserve_temp_slots (to_rtx);
3882 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3883 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3886 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3888 /* Don't move directly into a return register. */
3889 if (TREE_CODE (to) == RESULT_DECL
3890 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3895 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3897 if (GET_CODE (to_rtx) == PARALLEL)
3898 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3899 int_size_in_bytes (TREE_TYPE (from)));
3901 emit_move_insn (to_rtx, temp);
3903 preserve_temp_slots (to_rtx);
3909 /* In case we are returning the contents of an object which overlaps
3910 the place the value is being stored, use a safe function when copying
3911 a value through a pointer into a structure value return block. */
3912 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3913 && current_function_returns_struct
3914 && !current_function_returns_pcc_struct)
3919 size = expr_size (from);
3920 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3922 emit_library_call (memmove_libfunc, LCT_NORMAL,
3923 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3924 XEXP (from_rtx, 0), Pmode,
3925 convert_to_mode (TYPE_MODE (sizetype),
3926 size, TYPE_UNSIGNED (sizetype)),
3927 TYPE_MODE (sizetype));
3929 preserve_temp_slots (to_rtx);
3935 /* Compute FROM and store the value in the rtx we got. */
3938 result = store_expr (from, to_rtx, 0);
3939 preserve_temp_slots (result);
3945 /* Generate code for computing expression EXP,
3946 and storing the value into TARGET.
3948 If the mode is BLKmode then we may return TARGET itself.
3949    It turns out that in BLKmode it doesn't cause a problem,
3950 because C has no operators that could combine two different
3951 assignments into the same BLKmode object with different values
3952    with no sequence point.  Will other languages need this to be more thorough?
3955 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3956 stack, and block moves may need to be treated specially. */
3959 store_expr (tree exp, rtx target, int call_param_p)
3962 rtx alt_rtl = NULL_RTX;
3963 int dont_return_target = 0;
3965 if (VOID_TYPE_P (TREE_TYPE (exp)))
3967 /* C++ can generate ?: expressions with a throw expression in one
3968 branch and an rvalue in the other. Here, we resolve attempts to
3969 store the throw expression's nonexistent result. */
3970 gcc_assert (!call_param_p);
3971 expand_expr (exp, const0_rtx, VOIDmode, 0);
3974 if (TREE_CODE (exp) == COMPOUND_EXPR)
3976       /* Perform the first part of the compound expression, then assign from the second part.  */
3978 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3979 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3980 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3982 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3984 /* For conditional expression, get safe form of the target. Then
3985 test the condition, doing the appropriate assignment on either
3986 side. This avoids the creation of unnecessary temporaries.
3987 For non-BLKmode, it is more efficient not to do this. */
3989 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3991 do_pending_stack_adjust ();
3993 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3994 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3995 emit_jump_insn (gen_jump (lab2));
3998 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4004 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4005 /* If this is a scalar in a register that is stored in a wider mode
4006 than the declared mode, compute the result into its declared mode
4007        and then convert to the wider mode.  Our value is the computed expression.  */
4010 rtx inner_target = 0;
4012 /* We can do the conversion inside EXP, which will often result
4013 in some optimizations. Do the conversion in two steps: first
4014 change the signedness, if needed, then the extend. But don't
4015 do this if the type of EXP is a subtype of something else
4016 since then the conversion might involve more than just
4017 converting modes. */
4018 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4019 && TREE_TYPE (TREE_TYPE (exp)) == 0
4020 && (!lang_hooks.reduce_bit_field_operations
4021 || (GET_MODE_PRECISION (GET_MODE (target))
4022 == TYPE_PRECISION (TREE_TYPE (exp)))))
4024 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4025 != SUBREG_PROMOTED_UNSIGNED_P (target))
4027 (lang_hooks.types.signed_or_unsigned_type
4028 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4030 exp = convert (lang_hooks.types.type_for_mode
4031 (GET_MODE (SUBREG_REG (target)),
4032 SUBREG_PROMOTED_UNSIGNED_P (target)),
4035 inner_target = SUBREG_REG (target);
4038 temp = expand_expr (exp, inner_target, VOIDmode,
4039 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4041 /* If TEMP is a VOIDmode constant, use convert_modes to make
4042 sure that we properly convert it. */
4043 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4045 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4046 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4047 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4048 GET_MODE (target), temp,
4049 SUBREG_PROMOTED_UNSIGNED_P (target));
4052 convert_move (SUBREG_REG (target), temp,
4053 SUBREG_PROMOTED_UNSIGNED_P (target));
4059 temp = expand_expr_real (exp, target, GET_MODE (target),
4061 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4063 /* Return TARGET if it's a specified hardware register.
4064 If TARGET is a volatile mem ref, either return TARGET
4065 or return a reg copied *from* TARGET; ANSI requires this.
4067 Otherwise, if TEMP is not TARGET, return TEMP
4068 if it is constant (for efficiency),
4069 or if we really want the correct value. */
4070 if (!(target && REG_P (target)
4071 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4072 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4073 && ! rtx_equal_p (temp, target)
4074 && CONSTANT_P (temp))
4075 dont_return_target = 1;
4078 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4079 the same as that of TARGET, adjust the constant. This is needed, for
4080      example, in case it is a CONST_DOUBLE and we want only a word-sized value.  */
4082 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4083 && TREE_CODE (exp) != ERROR_MARK
4084 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4085 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4086 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4088 /* If value was not generated in the target, store it there.
4089 Convert the value to TARGET's type first if necessary and emit the
4090 pending incrementations that have been queued when expanding EXP.
4091 Note that we cannot emit the whole queue blindly because this will
4092 effectively disable the POST_INC optimization later.
4094 If TEMP and TARGET compare equal according to rtx_equal_p, but
4095 one or both of them are volatile memory refs, we have to distinguish
4097 - expand_expr has used TARGET. In this case, we must not generate
4098 another copy. This can be detected by TARGET being equal according
4100 - expand_expr has not used TARGET - that means that the source just
4101 happens to have the same RTX form. Since temp will have been created
4102 by expand_expr, it will compare unequal according to == .
4103 We must generate a copy in this case, to reach the correct number
4104 of volatile memory references. */
4106 if ((! rtx_equal_p (temp, target)
4107 || (temp != target && (side_effects_p (temp)
4108 || side_effects_p (target))))
4109 && TREE_CODE (exp) != ERROR_MARK
4110 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4111 but TARGET is not valid memory reference, TEMP will differ
4112 from TARGET although it is really the same location. */
4113 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4114 /* If there's nothing to copy, don't bother. Don't call expr_size
4115 unless necessary, because some front-ends (C++) expr_size-hook
4116 aborts on objects that are not supposed to be bit-copied or
4118 && expr_size (exp) != const0_rtx)
4120 if (GET_MODE (temp) != GET_MODE (target)
4121 && GET_MODE (temp) != VOIDmode)
4123 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4124 if (dont_return_target)
4126 /* In this case, we will return TEMP,
4127 so make sure it has the proper mode.
4128 But don't forget to store the value into TARGET. */
4129 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4130 emit_move_insn (target, temp);
4133 convert_move (target, temp, unsignedp);
4136 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4138 /* Handle copying a string constant into an array. The string
4139 constant may be shorter than the array. So copy just the string's
4140 actual length, and clear the rest. First get the size of the data
4141 type of the string, which is actually the size of the target. */
4142 rtx size = expr_size (exp);
4144 if (GET_CODE (size) == CONST_INT
4145 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4146 emit_block_move (target, temp, size,
4148 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4151 /* Compute the size of the data to copy from the string. */
4153 = size_binop (MIN_EXPR,
4154 make_tree (sizetype, size),
4155 size_int (TREE_STRING_LENGTH (exp)));
4157 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4159 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4162 /* Copy that much. */
4163 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4164 TYPE_UNSIGNED (sizetype));
4165 emit_block_move (target, temp, copy_size_rtx,
4167 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4169 /* Figure out how much is left in TARGET that we have to clear.
4170 Do all calculations in ptr_mode. */
4171 if (GET_CODE (copy_size_rtx) == CONST_INT)
4173 size = plus_constant (size, -INTVAL (copy_size_rtx));
4174 target = adjust_address (target, BLKmode,
4175 INTVAL (copy_size_rtx));
4179 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4180 copy_size_rtx, NULL_RTX, 0,
4183 #ifdef POINTERS_EXTEND_UNSIGNED
4184 if (GET_MODE (copy_size_rtx) != Pmode)
4185 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4186 TYPE_UNSIGNED (sizetype));
4189 target = offset_address (target, copy_size_rtx,
4190 highest_pow2_factor (copy_size));
4191 label = gen_label_rtx ();
4192 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4193 GET_MODE (size), 0, label);
4196 if (size != const0_rtx)
4197 clear_storage (target, size);
4203 /* Handle calls that return values in multiple non-contiguous locations.
4204 The Irix 6 ABI has examples of this. */
4205 else if (GET_CODE (target) == PARALLEL)
4206 emit_group_load (target, temp, TREE_TYPE (exp),
4207 int_size_in_bytes (TREE_TYPE (exp)));
4208 else if (GET_MODE (temp) == BLKmode)
4209 emit_block_move (target, temp, expr_size (exp),
4211 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4214 temp = force_operand (temp, target);
4216 emit_move_insn (target, temp);
4223 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4224 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4225 are set to non-constant values and place it in *P_NC_ELTS. */
4228 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4229 HOST_WIDE_INT *p_nc_elts)
4231 HOST_WIDE_INT nz_elts, nc_elts;
4237 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4239 tree value = TREE_VALUE (list);
4240 tree purpose = TREE_PURPOSE (list);
4244 if (TREE_CODE (purpose) == RANGE_EXPR)
4246 tree lo_index = TREE_OPERAND (purpose, 0);
4247 tree hi_index = TREE_OPERAND (purpose, 1);
4249 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4250 mult = (tree_low_cst (hi_index, 1)
4251 - tree_low_cst (lo_index, 1) + 1);
4254 switch (TREE_CODE (value))
4258 HOST_WIDE_INT nz = 0, nc = 0;
4259 categorize_ctor_elements_1 (value, &nz, &nc);
4260 nz_elts += mult * nz;
4261 nc_elts += mult * nc;
4267 if (!initializer_zerop (value))
4272 nz_elts += mult * TREE_STRING_LENGTH (value);
4276 if (!initializer_zerop (TREE_REALPART (value)))
4278 if (!initializer_zerop (TREE_IMAGPART (value)))
4285 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4286 if (!initializer_zerop (TREE_VALUE (v)))
4293 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4299 *p_nz_elts += nz_elts;
4300 *p_nc_elts += nc_elts;
4304 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4305 HOST_WIDE_INT *p_nc_elts)
4309 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4312 /* Count the number of scalars in TYPE. Return -1 on overflow or
4316 count_type_elements (tree type)
4318 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4319 switch (TREE_CODE (type))
4323 tree telts = array_type_nelts (type);
4324 if (telts && host_integerp (telts, 1))
4326 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4327 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4330 else if (max / n > m)
4338 HOST_WIDE_INT n = 0, t;
4341 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4342 if (TREE_CODE (f) == FIELD_DECL)
4344 t = count_type_elements (TREE_TYPE (f));
4354 case QUAL_UNION_TYPE:
4356 /* Ho hum. How in the world do we guess here? Clearly it isn't
4357 right to count the fields. Guess based on the number of words. */
4358 HOST_WIDE_INT n = int_size_in_bytes (type);
4361 return n / UNITS_PER_WORD;
4368 return TYPE_VECTOR_SUBPARTS (type);
4377 case REFERENCE_TYPE:
4390 /* Return 1 if EXP contains mostly (3/4) zeros. */
4393 mostly_zeros_p (tree exp)
4395 if (TREE_CODE (exp) == CONSTRUCTOR)
4398 HOST_WIDE_INT nz_elts, nc_elts, elts;
4400 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4401 elts = count_type_elements (TREE_TYPE (exp));
4403 return nz_elts < elts / 4;
4406 return initializer_zerop (exp);
4409 /* Helper function for store_constructor.
4410 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4411 TYPE is the type of the CONSTRUCTOR, not the element type.
4412 CLEARED is as for store_constructor.
4413 ALIAS_SET is the alias set to use for any stores.
4415 This provides a recursive shortcut back to store_constructor when it isn't
4416 necessary to go through store_field. This is so that we can pass through
4417 the cleared field to let store_constructor know that we may not have to
4418 clear a substructure if the outer structure has already been cleared. */
4421 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4422 HOST_WIDE_INT bitpos, enum machine_mode mode,
4423 tree exp, tree type, int cleared, int alias_set)
4425 if (TREE_CODE (exp) == CONSTRUCTOR
4426 /* We can only call store_constructor recursively if the size and
4427 bit position are on a byte boundary. */
4428 && bitpos % BITS_PER_UNIT == 0
4429 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4430 /* If we have a nonzero bitpos for a register target, then we just
4431 let store_field do the bitfield handling. This is unlikely to
4432 generate unnecessary clear instructions anyways. */
4433 && (bitpos == 0 || MEM_P (target)))
4437 = adjust_address (target,
4438 GET_MODE (target) == BLKmode
4440 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4441 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4444 /* Update the alias set, if required. */
4445 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4446 && MEM_ALIAS_SET (target) != 0)
4448 target = copy_rtx (target);
4449 set_mem_alias_set (target, alias_set);
4452 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4455 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4458 /* Store the value of constructor EXP into the rtx TARGET.
4459 TARGET is either a REG or a MEM; we know it cannot conflict, since
4460 safe_from_p has been called.
4461 CLEARED is true if TARGET is known to have been zero'd.
4462 SIZE is the number of bytes of TARGET we are allowed to modify: this
4463 may not be the same as the size of EXP if we are assigning to a field
4464 which has been packed to exclude padding bits. */
4467 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4469 tree type = TREE_TYPE (exp);
4470 #ifdef WORD_REGISTER_OPERATIONS
4471 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4474 switch (TREE_CODE (type))
4478 case QUAL_UNION_TYPE:
4482 /* If size is zero or the target is already cleared, do nothing. */
4483 if (size == 0 || cleared)
4485 /* We either clear the aggregate or indicate the value is dead. */
4486 else if ((TREE_CODE (type) == UNION_TYPE
4487 || TREE_CODE (type) == QUAL_UNION_TYPE)
4488 && ! CONSTRUCTOR_ELTS (exp))
4489 /* If the constructor is empty, clear the union. */
4491 clear_storage (target, expr_size (exp));
4495 /* If we are building a static constructor into a register,
4496 set the initial value as zero so we can fold the value into
4497 a constant. But if more than one register is involved,
4498 this probably loses. */
4499 else if (REG_P (target) && TREE_STATIC (exp)
4500 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4502 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4506 /* If the constructor has fewer fields than the structure or
4507 if we are initializing the structure to mostly zeros, clear
4508 the whole structure first. Don't do this if TARGET is a
4509 register whose mode size isn't equal to SIZE since
4510 clear_storage can't handle this case. */
4512 && ((list_length (CONSTRUCTOR_ELTS (exp))
4513 != fields_length (type))
4514 || mostly_zeros_p (exp))
4516 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4519 clear_storage (target, GEN_INT (size));
4524 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4526 /* Store each element of the constructor into the
4527 corresponding field of TARGET. */
4529 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4531 tree field = TREE_PURPOSE (elt);
4532 tree value = TREE_VALUE (elt);
4533 enum machine_mode mode;
4534 HOST_WIDE_INT bitsize;
4535 HOST_WIDE_INT bitpos = 0;
4537 rtx to_rtx = target;
4539 /* Just ignore missing fields. We cleared the whole
4540 structure, above, if any fields are missing. */
4544 if (cleared && initializer_zerop (value))
4547 if (host_integerp (DECL_SIZE (field), 1))
4548 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4552 mode = DECL_MODE (field);
4553 if (DECL_BIT_FIELD (field))
4556 offset = DECL_FIELD_OFFSET (field);
4557 if (host_integerp (offset, 0)
4558 && host_integerp (bit_position (field), 0))
4560 bitpos = int_bit_position (field);
4564 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4571 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4572 make_tree (TREE_TYPE (exp),
4575 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4576 gcc_assert (MEM_P (to_rtx));
4578 #ifdef POINTERS_EXTEND_UNSIGNED
4579 if (GET_MODE (offset_rtx) != Pmode)
4580 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4582 if (GET_MODE (offset_rtx) != ptr_mode)
4583 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4586 to_rtx = offset_address (to_rtx, offset_rtx,
4587 highest_pow2_factor (offset));
4590 #ifdef WORD_REGISTER_OPERATIONS
4591 /* If this initializes a field that is smaller than a
4592 word, at the start of a word, try to widen it to a full
4593 word. This special case allows us to output C++ member
4594 function initializations in a form that the optimizers
4597 && bitsize < BITS_PER_WORD
4598 && bitpos % BITS_PER_WORD == 0
4599 && GET_MODE_CLASS (mode) == MODE_INT
4600 && TREE_CODE (value) == INTEGER_CST
4602 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4604 tree type = TREE_TYPE (value);
4606 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4608 type = lang_hooks.types.type_for_size
4609 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4610 value = convert (type, value);
4613 if (BYTES_BIG_ENDIAN)
4615 = fold (build2 (LSHIFT_EXPR, type, value,
4616 build_int_cst (NULL_TREE,
4617 BITS_PER_WORD - bitsize)));
4618 bitsize = BITS_PER_WORD;
4623 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4624 && DECL_NONADDRESSABLE_P (field))
4626 to_rtx = copy_rtx (to_rtx);
4627 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4630 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4631 value, type, cleared,
4632 get_alias_set (TREE_TYPE (field)));
4642 tree elttype = TREE_TYPE (type);
4644 HOST_WIDE_INT minelt = 0;
4645 HOST_WIDE_INT maxelt = 0;
4647 domain = TYPE_DOMAIN (type);
4648 const_bounds_p = (TYPE_MIN_VALUE (domain)
4649 && TYPE_MAX_VALUE (domain)
4650 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4651 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4653 /* If we have constant bounds for the range of the type, get them. */
4656 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4657 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4660 /* If the constructor has fewer elements than the array, clear
4661 the whole array first. Similarly if this is static
4662 constructor of a non-BLKmode object. */
4665 else if (REG_P (target) && TREE_STATIC (exp))
4669 HOST_WIDE_INT count = 0, zero_count = 0;
4670 need_to_clear = ! const_bounds_p;
4672 /* This loop is a more accurate version of the loop in
4673 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4674 is also needed to check for missing elements. */
4675 for (elt = CONSTRUCTOR_ELTS (exp);
4676 elt != NULL_TREE && ! need_to_clear;
4677 elt = TREE_CHAIN (elt))
4679 tree index = TREE_PURPOSE (elt);
4680 HOST_WIDE_INT this_node_count;
4682 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4684 tree lo_index = TREE_OPERAND (index, 0);
4685 tree hi_index = TREE_OPERAND (index, 1);
4687 if (! host_integerp (lo_index, 1)
4688 || ! host_integerp (hi_index, 1))
4694 this_node_count = (tree_low_cst (hi_index, 1)
4695 - tree_low_cst (lo_index, 1) + 1);
4698 this_node_count = 1;
4700 count += this_node_count;
4701 if (mostly_zeros_p (TREE_VALUE (elt)))
4702 zero_count += this_node_count;
4705 /* Clear the entire array first if there are any missing
4706 elements, or if the incidence of zero elements is >=
4709 && (count < maxelt - minelt + 1
4710 || 4 * zero_count >= 3 * count))
4714 if (need_to_clear && size > 0)
4717 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4719 clear_storage (target, GEN_INT (size));
4723 if (!cleared && REG_P (target))
4724 /* Inform later passes that the old value is dead. */
4725 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4727 /* Store each element of the constructor into the
4728 corresponding element of TARGET, determined by counting the
4730 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4732 elt = TREE_CHAIN (elt), i++)
4734 enum machine_mode mode;
4735 HOST_WIDE_INT bitsize;
4736 HOST_WIDE_INT bitpos;
4738 tree value = TREE_VALUE (elt);
4739 tree index = TREE_PURPOSE (elt);
4740 rtx xtarget = target;
4742 if (cleared && initializer_zerop (value))
4745 unsignedp = TYPE_UNSIGNED (elttype);
4746 mode = TYPE_MODE (elttype);
4747 if (mode == BLKmode)
4748 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4749 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4752 bitsize = GET_MODE_BITSIZE (mode);
4754 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4756 tree lo_index = TREE_OPERAND (index, 0);
4757 tree hi_index = TREE_OPERAND (index, 1);
4758 rtx index_r, pos_rtx;
4759 HOST_WIDE_INT lo, hi, count;
4762 /* If the range is constant and "small", unroll the loop. */
4764 && host_integerp (lo_index, 0)
4765 && host_integerp (hi_index, 0)
4766 && (lo = tree_low_cst (lo_index, 0),
4767 hi = tree_low_cst (hi_index, 0),
4768 count = hi - lo + 1,
4771 || (host_integerp (TYPE_SIZE (elttype), 1)
4772 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4775 lo -= minelt; hi -= minelt;
4776 for (; lo <= hi; lo++)
4778 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4781 && !MEM_KEEP_ALIAS_SET_P (target)
4782 && TREE_CODE (type) == ARRAY_TYPE
4783 && TYPE_NONALIASED_COMPONENT (type))
4785 target = copy_rtx (target);
4786 MEM_KEEP_ALIAS_SET_P (target) = 1;
4789 store_constructor_field
4790 (target, bitsize, bitpos, mode, value, type, cleared,
4791 get_alias_set (elttype));
4796 rtx loop_start = gen_label_rtx ();
4797 rtx loop_end = gen_label_rtx ();
4800 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4801 unsignedp = TYPE_UNSIGNED (domain);
4803 index = build_decl (VAR_DECL, NULL_TREE, domain);
4806 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4808 SET_DECL_RTL (index, index_r);
4809 store_expr (lo_index, index_r, 0);
4811 /* Build the head of the loop. */
4812 do_pending_stack_adjust ();
4813 emit_label (loop_start);
4815 /* Assign value to element index. */
4817 = convert (ssizetype,
4818 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4819 index, TYPE_MIN_VALUE (domain))));
4820 position = size_binop (MULT_EXPR, position,
4822 TYPE_SIZE_UNIT (elttype)));
4824 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4825 xtarget = offset_address (target, pos_rtx,
4826 highest_pow2_factor (position));
4827 xtarget = adjust_address (xtarget, mode, 0);
4828 if (TREE_CODE (value) == CONSTRUCTOR)
4829 store_constructor (value, xtarget, cleared,
4830 bitsize / BITS_PER_UNIT);
4832 store_expr (value, xtarget, 0);
4834 /* Generate a conditional jump to exit the loop. */
4835 exit_cond = build2 (LT_EXPR, integer_type_node,
4837 jumpif (exit_cond, loop_end);
4839 /* Update the loop counter, and jump to the head of
4841 expand_assignment (index,
4842 build2 (PLUS_EXPR, TREE_TYPE (index),
4843 index, integer_one_node));
4845 emit_jump (loop_start);
4847 /* Build the end of the loop. */
4848 emit_label (loop_end);
4851 else if ((index != 0 && ! host_integerp (index, 0))
4852 || ! host_integerp (TYPE_SIZE (elttype), 1))
4857 index = ssize_int (1);
4860 index = fold_convert (ssizetype,
4861 fold (build2 (MINUS_EXPR,
4864 TYPE_MIN_VALUE (domain))));
4866 position = size_binop (MULT_EXPR, index,
4868 TYPE_SIZE_UNIT (elttype)));
4869 xtarget = offset_address (target,
4870 expand_expr (position, 0, VOIDmode, 0),
4871 highest_pow2_factor (position));
4872 xtarget = adjust_address (xtarget, mode, 0);
4873 store_expr (value, xtarget, 0);
4878 bitpos = ((tree_low_cst (index, 0) - minelt)
4879 * tree_low_cst (TYPE_SIZE (elttype), 1));
4881 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4883 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4884 && TREE_CODE (type) == ARRAY_TYPE
4885 && TYPE_NONALIASED_COMPONENT (type))
4887 target = copy_rtx (target);
4888 MEM_KEEP_ALIAS_SET_P (target) = 1;
4890 store_constructor_field (target, bitsize, bitpos, mode, value,
4891 type, cleared, get_alias_set (elttype));
4903 tree elttype = TREE_TYPE (type);
4904 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4905 enum machine_mode eltmode = TYPE_MODE (elttype);
4906 HOST_WIDE_INT bitsize;
4907 HOST_WIDE_INT bitpos;
4911 gcc_assert (eltmode != BLKmode);
4913 n_elts = TYPE_VECTOR_SUBPARTS (type);
4914 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4916 enum machine_mode mode = GET_MODE (target);
4918 icode = (int) vec_init_optab->handlers[mode].insn_code;
4919 if (icode != CODE_FOR_nothing)
4923 vector = alloca (n_elts);
4924 for (i = 0; i < n_elts; i++)
4925 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4929 /* If the constructor has fewer elements than the vector,
4930 clear the whole array first. Similarly if this is static
4931 constructor of a non-BLKmode object. */
4934 else if (REG_P (target) && TREE_STATIC (exp))
4938 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4940 for (elt = CONSTRUCTOR_ELTS (exp);
4942 elt = TREE_CHAIN (elt))
4944 int n_elts_here = tree_low_cst
4945 (int_const_binop (TRUNC_DIV_EXPR,
4946 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4947 TYPE_SIZE (elttype), 0), 1);
4949 count += n_elts_here;
4950 if (mostly_zeros_p (TREE_VALUE (elt)))
4951 zero_count += n_elts_here;
4954 /* Clear the entire vector first if there are any missing elements,
4955 or if the incidence of zero elements is >= 75%. */
4956 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4959 if (need_to_clear && size > 0 && !vector)
4962 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4964 clear_storage (target, GEN_INT (size));
4968 if (!cleared && REG_P (target))
4969 /* Inform later passes that the old value is dead. */
4970 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4972 /* Store each element of the constructor into the corresponding
4973 element of TARGET, determined by counting the elements. */
4974 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4976 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4978 tree value = TREE_VALUE (elt);
4979 tree index = TREE_PURPOSE (elt);
4980 HOST_WIDE_INT eltpos;
4982 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4983 if (cleared && initializer_zerop (value))
4987 eltpos = tree_low_cst (index, 1);
4993 /* Vector CONSTRUCTORs should only be built from smaller
4994 vectors in the case of BLKmode vectors. */
4995 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
4996 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5000 enum machine_mode value_mode =
5001 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5002 ? TYPE_MODE (TREE_TYPE (value))
5004 bitpos = eltpos * elt_size;
5005 store_constructor_field (target, bitsize, bitpos,
5006 value_mode, value, type,
5007 cleared, get_alias_set (elttype));
5012 emit_insn (GEN_FCN (icode)
5014 gen_rtx_PARALLEL (GET_MODE (target),
5015 gen_rtvec_v (n_elts, vector))));
5024 /* Store the value of EXP (an expression tree)
5025 into a subfield of TARGET which has mode MODE and occupies
5026 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5027 If MODE is VOIDmode, it means that we are storing into a bit-field.
5029 Always return const0_rtx unless we have something particular to
5032 TYPE is the type of the underlying object,
5034 ALIAS_SET is the alias set for the destination. This value will
5035 (in general) be different from that for TARGET, since TARGET is a
5036 reference to the containing structure. */
5039 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5040 enum machine_mode mode, tree exp, tree type, int alias_set)
5042 HOST_WIDE_INT width_mask = 0;
5044 if (TREE_CODE (exp) == ERROR_MARK)
5047 /* If we have nothing to store, do nothing unless the expression has
5050 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5051 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5052 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5054 /* If we are storing into an unaligned field of an aligned union that is
5055 in a register, we may have the mode of TARGET being an integer mode but
5056 MODE == BLKmode. In that case, get an aligned object whose size and
5057 alignment are the same as TARGET and store TARGET into it (we can avoid
5058 the store if the field being stored is the entire width of TARGET). Then
5059 call ourselves recursively to store the field into a BLKmode version of
5060 that object. Finally, load from the object into TARGET. This is not
5061 very efficient in general, but should only be slightly more expensive
5062 than the otherwise-required unaligned accesses. Perhaps this can be
5063 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5064 twice, once with emit_move_insn and once via store_field. */
5067 && (REG_P (target) || GET_CODE (target) == SUBREG))
5069 rtx object = assign_temp (type, 0, 1, 1);
5070 rtx blk_object = adjust_address (object, BLKmode, 0);
5072 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5073 emit_move_insn (object, target);
5075 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5077 emit_move_insn (target, object);
5079 /* We want to return the BLKmode version of the data. */
5083 if (GET_CODE (target) == CONCAT)
5085 /* We're storing into a struct containing a single __complex. */
5087 gcc_assert (!bitpos);
5088 return store_expr (exp, target, 0);
5091 /* If the structure is in a register or if the component
5092 is a bit field, we cannot use addressing to access it.
5093 Use bit-field techniques or SUBREG to store in it. */
5095 if (mode == VOIDmode
5096 || (mode != BLKmode && ! direct_store[(int) mode]
5097 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5098 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5100 || GET_CODE (target) == SUBREG
5101 /* If the field isn't aligned enough to store as an ordinary memref,
5102 store it as a bit field. */
5104 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5105 || bitpos % GET_MODE_ALIGNMENT (mode))
5106 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5107 || (bitpos % BITS_PER_UNIT != 0)))
5108 /* If the RHS and field are a constant size and the size of the
5109 RHS isn't the same size as the bitfield, we must use bitfield
5112 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5113 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5115 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5117 /* If BITSIZE is narrower than the size of the type of EXP
5118 we will be narrowing TEMP. Normally, what's wanted are the
5119 low-order bits. However, if EXP's type is a record and this is
5120 big-endian machine, we want the upper BITSIZE bits. */
5121 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5122 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5123 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5124 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5125 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5129 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5131 if (mode != VOIDmode && mode != BLKmode
5132 && mode != TYPE_MODE (TREE_TYPE (exp)))
5133 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5135 /* If the modes of TARGET and TEMP are both BLKmode, both
5136 must be in memory and BITPOS must be aligned on a byte
5137 boundary. If so, we simply do a block copy. */
5138 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5140 gcc_assert (MEM_P (target) && MEM_P (temp)
5141 && !(bitpos % BITS_PER_UNIT));
5143 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5144 emit_block_move (target, temp,
5145 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5152 /* Store the value in the bitfield. */
5153 store_bit_field (target, bitsize, bitpos, mode, temp);
5159 /* Now build a reference to just the desired component. */
5160 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5162 if (to_rtx == target)
5163 to_rtx = copy_rtx (to_rtx);
5165 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5166 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5167 set_mem_alias_set (to_rtx, alias_set);
5169 return store_expr (exp, to_rtx, 0);
5173 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5174 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5175 codes and find the ultimate containing object, which we return.
5177 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5178 bit position, and *PUNSIGNEDP to the signedness of the field.
5179 If the position of the field is variable, we store a tree
5180 giving the variable offset (in units) in *POFFSET.
5181 This offset is in addition to the bit position.
5182 If the position is not variable, we store 0 in *POFFSET.
5184 If any of the extraction expressions is volatile,
5185 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5187 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5188 is a mode that can be used to access the field. In that case, *PBITSIZE
5191 If the field describes a variable-sized object, *PMODE is set to
5192 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5193 this case, but the address of the object can be found. */
5196 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5197 HOST_WIDE_INT *pbitpos, tree *poffset,
5198 enum machine_mode *pmode, int *punsignedp,
5202 enum machine_mode mode = VOIDmode;
5203 tree offset = size_zero_node;
5204 tree bit_offset = bitsize_zero_node;
5207 /* First get the mode, signedness, and size. We do this from just the
5208 outermost expression. */
5209 if (TREE_CODE (exp) == COMPONENT_REF)
5211 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5212 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5213 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5215 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5217 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5219 size_tree = TREE_OPERAND (exp, 1);
5220 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5224 mode = TYPE_MODE (TREE_TYPE (exp));
5225 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5227 if (mode == BLKmode)
5228 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5230 *pbitsize = GET_MODE_BITSIZE (mode);
5235 if (! host_integerp (size_tree, 1))
5236 mode = BLKmode, *pbitsize = -1;
5238 *pbitsize = tree_low_cst (size_tree, 1);
5241 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5242 and find the ultimate containing object. */
5245 switch (TREE_CODE (exp))
5248 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5249 TREE_OPERAND (exp, 2));
5254 tree field = TREE_OPERAND (exp, 1);
5255 tree this_offset = component_ref_field_offset (exp);
5257 /* If this field hasn't been filled in yet, don't go past it.
5258 This should only happen when folding expressions made during
5259 type construction. */
5260 if (this_offset == 0)
5263 offset = size_binop (PLUS_EXPR, offset, this_offset);
5264 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5265 DECL_FIELD_BIT_OFFSET (field));
5267 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5272 case ARRAY_RANGE_REF:
5274 tree index = TREE_OPERAND (exp, 1);
5275 tree low_bound = array_ref_low_bound (exp);
5276 tree unit_size = array_ref_element_size (exp);
5278 /* We assume all arrays have sizes that are a multiple of a byte.
5279 First subtract the lower bound, if any, in the type of the
5280 index, then convert to sizetype and multiply by the size of
5281 the array element. */
5282 if (! integer_zerop (low_bound))
5283 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5286 offset = size_binop (PLUS_EXPR, offset,
5287 size_binop (MULT_EXPR,
5288 convert (sizetype, index),
5297 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5298 bitsize_int (*pbitsize));
5301 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5302 conversions that don't change the mode, and all view conversions
5303 except those that need to "step up" the alignment. */
5305 case VIEW_CONVERT_EXPR:
5306 if ((TYPE_ALIGN (TREE_TYPE (exp))
5307 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5309 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5310 < BIGGEST_ALIGNMENT)
5311 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5312 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5320 /* If any reference in the chain is volatile, the effect is volatile. */
5321 if (TREE_THIS_VOLATILE (exp))
5324 exp = TREE_OPERAND (exp, 0);
5328 /* If OFFSET is constant, see if we can return the whole thing as a
5329 constant bit position. Otherwise, split it up. */
5330 if (host_integerp (offset, 0)
5331 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5333 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5334 && host_integerp (tem, 0))
5335 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5337 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5343 /* Return a tree of sizetype representing the size, in bytes, of the element
5344 of EXP, an ARRAY_REF. */
5347 array_ref_element_size (tree exp)
5349 tree aligned_size = TREE_OPERAND (exp, 3);
5350 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5352 /* If a size was specified in the ARRAY_REF, it's the size measured
5353 in alignment units of the element type. So multiply by that value. */
5356 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5357 sizetype from another type of the same width and signedness. */
5358 if (TREE_TYPE (aligned_size) != sizetype)
5359 aligned_size = fold_convert (sizetype, aligned_size);
5360 return size_binop (MULT_EXPR, aligned_size,
5361 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5364 /* Otherwise, take the size from that of the element type. Substitute
5365 any PLACEHOLDER_EXPR that we have. */
5367 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5370 /* Return a tree representing the lower bound of the array mentioned in
5371 EXP, an ARRAY_REF. */
5374 array_ref_low_bound (tree exp)
5376 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5378 /* If a lower bound is specified in EXP, use it. */
5379 if (TREE_OPERAND (exp, 2))
5380 return TREE_OPERAND (exp, 2);
5382 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5383 substituting for a PLACEHOLDER_EXPR as needed. */
5384 if (domain_type && TYPE_MIN_VALUE (domain_type))
5385 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5387 /* Otherwise, return a zero of the appropriate type. */
5388 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5391 /* Return a tree representing the upper bound of the array mentioned in
5392 EXP, an ARRAY_REF. */
5395 array_ref_up_bound (tree exp)
5397 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5399 /* If there is a domain type and it has an upper bound, use it, substituting
5400 for a PLACEHOLDER_EXPR as needed. */
5401 if (domain_type && TYPE_MAX_VALUE (domain_type))
5402 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5404 /* Otherwise fail. */
5408 /* Return a tree representing the offset, in bytes, of the field referenced
5409 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5412 component_ref_field_offset (tree exp)
5414 tree aligned_offset = TREE_OPERAND (exp, 2);
5415 tree field = TREE_OPERAND (exp, 1);
5417 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5418 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5422 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5423 sizetype from another type of the same width and signedness. */
5424 if (TREE_TYPE (aligned_offset) != sizetype)
5425 aligned_offset = fold_convert (sizetype, aligned_offset);
5426 return size_binop (MULT_EXPR, aligned_offset,
5427 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5430 /* Otherwise, take the offset from that of the field. Substitute
5431 any PLACEHOLDER_EXPR that we have. */
5433 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5436 /* Return 1 if T is an expression that get_inner_reference handles. */
5439 handled_component_p (tree t)
5441 switch (TREE_CODE (t))
5446 case ARRAY_RANGE_REF:
5447 case VIEW_CONVERT_EXPR:
5457 /* Given an rtx VALUE that may contain additions and multiplications, return
5458 an equivalent value that just refers to a register, memory, or constant.
5459 This is done by generating instructions to perform the arithmetic and
5460 returning a pseudo-register containing the value.
5462 The returned value may be a REG, SUBREG, MEM or constant. */
5465 force_operand (rtx value, rtx target)
5468 /* Use subtarget as the target for operand 0 of a binary operation. */
5469 rtx subtarget = get_subtarget (target);
5470 enum rtx_code code = GET_CODE (value);
5472 /* Check for subreg applied to an expression produced by loop optimizer. */
5474 && !REG_P (SUBREG_REG (value))
5475 && !MEM_P (SUBREG_REG (value)))
5477 value = simplify_gen_subreg (GET_MODE (value),
5478 force_reg (GET_MODE (SUBREG_REG (value)),
5479 force_operand (SUBREG_REG (value),
5481 GET_MODE (SUBREG_REG (value)),
5482 SUBREG_BYTE (value));
5483 code = GET_CODE (value);
5486 /* Check for a PIC address load. */
5487 if ((code == PLUS || code == MINUS)
5488 && XEXP (value, 0) == pic_offset_table_rtx
5489 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5490 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5491 || GET_CODE (XEXP (value, 1)) == CONST))
5494 subtarget = gen_reg_rtx (GET_MODE (value));
5495 emit_move_insn (subtarget, value);
5499 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5502 target = gen_reg_rtx (GET_MODE (value));
5503 convert_move (target, force_operand (XEXP (value, 0), NULL),
5504 code == ZERO_EXTEND);
5508 if (ARITHMETIC_P (value))
5510 op2 = XEXP (value, 1);
5511 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5513 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5516 op2 = negate_rtx (GET_MODE (value), op2);
5519 /* Check for an addition with OP2 a constant integer and our first
5520 operand a PLUS of a virtual register and something else. In that
5521 case, we want to emit the sum of the virtual register and the
5522 constant first and then add the other value. This allows virtual
5523 register instantiation to simply modify the constant rather than
5524 creating another one around this addition. */
5525 if (code == PLUS && GET_CODE (op2) == CONST_INT
5526 && GET_CODE (XEXP (value, 0)) == PLUS
5527 && REG_P (XEXP (XEXP (value, 0), 0))
5528 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5529 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5531 rtx temp = expand_simple_binop (GET_MODE (value), code,
5532 XEXP (XEXP (value, 0), 0), op2,
5533 subtarget, 0, OPTAB_LIB_WIDEN);
5534 return expand_simple_binop (GET_MODE (value), code, temp,
5535 force_operand (XEXP (XEXP (value,
5537 target, 0, OPTAB_LIB_WIDEN);
5540 op1 = force_operand (XEXP (value, 0), subtarget);
5541 op2 = force_operand (op2, NULL_RTX);
5545 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5547 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5548 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5549 target, 1, OPTAB_LIB_WIDEN);
5551 return expand_divmod (0,
5552 FLOAT_MODE_P (GET_MODE (value))
5553 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5554 GET_MODE (value), op1, op2, target, 0);
5557 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5561 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5565 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5569 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5570 target, 0, OPTAB_LIB_WIDEN);
5573 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5574 target, 1, OPTAB_LIB_WIDEN);
5577 if (UNARY_P (value))
5579 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5580 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5583 #ifdef INSN_SCHEDULING
5584 /* On machines that have insn scheduling, we want all memory reference to be
5585 explicit, so we need to deal with such paradoxical SUBREGs. */
5586 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5587 && (GET_MODE_SIZE (GET_MODE (value))
5588 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5590 = simplify_gen_subreg (GET_MODE (value),
5591 force_reg (GET_MODE (SUBREG_REG (value)),
5592 force_operand (SUBREG_REG (value),
5594 GET_MODE (SUBREG_REG (value)),
5595 SUBREG_BYTE (value));
5601 /* Subroutine of expand_expr: return nonzero iff there is no way that
5602 EXP can reference X, which is being modified. TOP_P is nonzero if this
5603 call is going to be used to determine whether we need a temporary
5604 for EXP, as opposed to a recursive call to this function.
5606 It is always safe for this routine to return zero since it merely
5607 searches for optimization opportunities. */
5610 safe_from_p (rtx x, tree exp, int top_p)
5616 /* If EXP has varying size, we MUST use a target since we currently
5617 have no way of allocating temporaries of variable size
5618 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5619 So we assume here that something at a higher level has prevented a
5620 clash. This is somewhat bogus, but the best we can do. Only
5621 do this when X is BLKmode and when we are at the top level. */
5622 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5623 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5624 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5625 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5626 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5628 && GET_MODE (x) == BLKmode)
5629 /* If X is in the outgoing argument area, it is always safe. */
5631 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5632 || (GET_CODE (XEXP (x, 0)) == PLUS
5633 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5636 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5637 find the underlying pseudo. */
5638 if (GET_CODE (x) == SUBREG)
5641 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5645 /* Now look at our tree code and possibly recurse. */
5646 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5648 case tcc_declaration:
5649 exp_rtl = DECL_RTL_IF_SET (exp);
5655 case tcc_exceptional:
5656 if (TREE_CODE (exp) == TREE_LIST)
5660 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5662 exp = TREE_CHAIN (exp);
5665 if (TREE_CODE (exp) != TREE_LIST)
5666 return safe_from_p (x, exp, 0);
5669 else if (TREE_CODE (exp) == ERROR_MARK)
5670 return 1; /* An already-visited SAVE_EXPR? */
5675 /* The only case we look at here is the DECL_INITIAL inside a
5677 return (TREE_CODE (exp) != DECL_EXPR
5678 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5679 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5680 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5683 case tcc_comparison:
5684 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5689 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5691 case tcc_expression:
5693 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5694 the expression. If it is set, we conflict iff we are that rtx or
5695 both are in memory. Otherwise, we check all operands of the
5696 expression recursively. */
5698 switch (TREE_CODE (exp))
5701 /* If the operand is static or we are static, we can't conflict.
5702 Likewise if we don't conflict with the operand at all. */
5703 if (staticp (TREE_OPERAND (exp, 0))
5704 || TREE_STATIC (exp)
5705 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5708 /* Otherwise, the only way this can conflict is if we are taking
5709 the address of a DECL a that address if part of X, which is
5711 exp = TREE_OPERAND (exp, 0);
5714 if (!DECL_RTL_SET_P (exp)
5715 || !MEM_P (DECL_RTL (exp)))
5718 exp_rtl = XEXP (DECL_RTL (exp), 0);
5722 case MISALIGNED_INDIRECT_REF:
5723 case ALIGN_INDIRECT_REF:
5726 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5727 get_alias_set (exp)))
5732 /* Assume that the call will clobber all hard registers and
5734 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5739 case WITH_CLEANUP_EXPR:
5740 case CLEANUP_POINT_EXPR:
5741 /* Lowered by gimplify.c. */
5745 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5751 /* If we have an rtx, we do not need to scan our operands. */
5755 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5756 for (i = 0; i < nops; i++)
5757 if (TREE_OPERAND (exp, i) != 0
5758 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5761 /* If this is a language-specific tree code, it may require
5762 special handling. */
5763 if ((unsigned int) TREE_CODE (exp)
5764 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5765 && !lang_hooks.safe_from_p (x, exp))
5770 /* Should never get a type here. */
5774 /* If we have an rtl, find any enclosed object. Then see if we conflict
5778 if (GET_CODE (exp_rtl) == SUBREG)
5780 exp_rtl = SUBREG_REG (exp_rtl);
5782 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5786 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5787 are memory and they conflict. */
5788 return ! (rtx_equal_p (x, exp_rtl)
5789 || (MEM_P (x) && MEM_P (exp_rtl)
5790 && true_dependence (exp_rtl, VOIDmode, x,
5791 rtx_addr_varies_p)));
5794 /* If we reach here, it is safe. */
5799 /* Return the highest power of two that EXP is known to be a multiple of.
5800 This is used in updating alignment of MEMs in array references. */
5802 static unsigned HOST_WIDE_INT
5803 highest_pow2_factor (tree exp)
5805 unsigned HOST_WIDE_INT c0, c1;
5807 switch (TREE_CODE (exp))
5810 /* We can find the lowest bit that's a one. If the low
5811 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5812 We need to handle this case since we can find it in a COND_EXPR,
5813 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5814 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5816 if (TREE_CONSTANT_OVERFLOW (exp))
5817 return BIGGEST_ALIGNMENT;
5820 /* Note: tree_low_cst is intentionally not used here,
5821 we don't care about the upper bits. */
5822 c0 = TREE_INT_CST_LOW (exp);
5824 return c0 ? c0 : BIGGEST_ALIGNMENT;
5828 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5829 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5830 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5831 return MIN (c0, c1);
5834 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5835 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5838 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5840 if (integer_pow2p (TREE_OPERAND (exp, 1))
5841 && host_integerp (TREE_OPERAND (exp, 1), 1))
5843 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5844 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5845 return MAX (1, c0 / c1);
5849 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5851 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5854 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5857 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5858 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5859 return MIN (c0, c1);
5868 /* Similar, except that the alignment requirements of TARGET are
5869 taken into account. Assume it is at least as aligned as its
5870 type, unless it is a COMPONENT_REF in which case the layout of
5871 the structure gives the alignment. */
5873 static unsigned HOST_WIDE_INT
5874 highest_pow2_factor_for_target (tree target, tree exp)
5876 unsigned HOST_WIDE_INT target_align, factor;
5878 factor = highest_pow2_factor (exp);
5879 if (TREE_CODE (target) == COMPONENT_REF)
5880 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5882 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5883 return MAX (factor, target_align);
5886 /* Expands variable VAR. */
5889 expand_var (tree var)
5891 if (DECL_EXTERNAL (var))
5894 if (TREE_STATIC (var))
5895 /* If this is an inlined copy of a static local variable,
5896 look up the original decl. */
5897 var = DECL_ORIGIN (var);
5899 if (TREE_STATIC (var)
5900 ? !TREE_ASM_WRITTEN (var)
5901 : !DECL_RTL_SET_P (var))
5903 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5904 /* Should be ignored. */;
5905 else if (lang_hooks.expand_decl (var))
5907 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5909 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5910 rest_of_decl_compilation (var, 0, 0);
5912 /* No expansion needed. */
5913 gcc_assert (TREE_CODE (var) == TYPE_DECL
5914 || TREE_CODE (var) == CONST_DECL
5915 || TREE_CODE (var) == FUNCTION_DECL
5916 || TREE_CODE (var) == LABEL_DECL);
5920 /* Subroutine of expand_expr. Expand the two operands of a binary
5921 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5922 The value may be stored in TARGET if TARGET is nonzero. The
5923 MODIFIER argument is as documented by expand_expr. */
5926 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5927 enum expand_modifier modifier)
5929 if (! safe_from_p (target, exp1, 1))
5931 if (operand_equal_p (exp0, exp1, 0))
5933 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5934 *op1 = copy_rtx (*op0);
5938 /* If we need to preserve evaluation order, copy exp0 into its own
5939 temporary variable so that it can't be clobbered by exp1. */
5940 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5941 exp0 = save_expr (exp0);
5942 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5943 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
5948 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
5949 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
5952 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
5953 enum expand_modifier modifier)
5955 rtx result, subtarget;
5957 HOST_WIDE_INT bitsize, bitpos;
5958 int volatilep, unsignedp;
5959 enum machine_mode mode1;
5961 /* If we are taking the address of a constant and are at the top level,
5962 we have to use output_constant_def since we can't call force_const_mem
5964 /* ??? This should be considered a front-end bug. We should not be
5965 generating ADDR_EXPR of something that isn't an LVALUE. The only
5966 exception here is STRING_CST. */
5967 if (TREE_CODE (exp) == CONSTRUCTOR
5968 || CONSTANT_CLASS_P (exp))
5969 return XEXP (output_constant_def (exp, 0), 0);
5971 /* Everything must be something allowed by is_gimple_addressable. */
5972 switch (TREE_CODE (exp))
5975 /* This case will happen via recursion for &a->b. */
5976 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
5979 /* Recurse and make the output_constant_def clause above handle this. */
5980 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
5984 /* The real part of the complex number is always first, therefore
5985 the address is the same as the address of the parent object. */
5988 inner = TREE_OPERAND (exp, 0);
5992 /* The imaginary part of the complex number is always second.
5993 The expression is therefore always offset by the size of the
5996 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
5997 inner = TREE_OPERAND (exp, 0);
6001 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6002 expand_expr, as that can have various side effects; LABEL_DECLs for
6003 example, may not have their DECL_RTL set yet. Assume language
6004 specific tree nodes can be expanded in some interesting way. */
6006 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6008 result = expand_expr (exp, target, tmode,
6009 modifier == EXPAND_INITIALIZER
6010 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6012 /* If the DECL isn't in memory, then the DECL wasn't properly
6013 marked TREE_ADDRESSABLE, which will be either a front-end
6014 or a tree optimizer bug. */
6015 gcc_assert (GET_CODE (result) == MEM);
6016 result = XEXP (result, 0);
6018 /* ??? Is this needed anymore? */
6019 if (DECL_P (exp) && !TREE_USED (exp) == 0)
6021 assemble_external (exp);
6022 TREE_USED (exp) = 1;
6025 if (modifier != EXPAND_INITIALIZER
6026 && modifier != EXPAND_CONST_ADDRESS)
6027 result = force_operand (result, target);
6031 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6032 &mode1, &unsignedp, &volatilep);
6036 /* We must have made progress. */
6037 gcc_assert (inner != exp);
6039 subtarget = offset || bitpos ? NULL_RTX : target;
6040 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6046 if (modifier != EXPAND_NORMAL)
6047 result = force_operand (result, NULL);
6048 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6050 result = convert_memory_address (tmode, result);
6051 tmp = convert_memory_address (tmode, tmp);
6053 if (modifier == EXPAND_SUM)
6054 result = gen_rtx_PLUS (tmode, result, tmp);
6057 subtarget = bitpos ? NULL_RTX : target;
6058 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6059 1, OPTAB_LIB_WIDEN);
6065 /* Someone beforehand should have rejected taking the address
6066 of such an object. */
6067 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6069 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6070 if (modifier < EXPAND_SUM)
6071 result = force_operand (result, target);
6077 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6078 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6081 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6082 enum expand_modifier modifier)
6084 enum machine_mode rmode;
6087 /* Target mode of VOIDmode says "whatever's natural". */
6088 if (tmode == VOIDmode)
6089 tmode = TYPE_MODE (TREE_TYPE (exp));
6091 /* We can get called with some Weird Things if the user does silliness
6092 like "(short) &a". In that case, convert_memory_address won't do
6093 the right thing, so ignore the given target mode. */
6094 if (tmode != Pmode && tmode != ptr_mode)
6097 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6100 /* Despite expand_expr claims concerning ignoring TMODE when not
6101 strictly convenient, stuff breaks if we don't honor it. Note
6102 that combined with the above, we only do this for pointer modes. */
6103 rmode = GET_MODE (result);
6104 if (rmode == VOIDmode)
6107 result = convert_memory_address (tmode, result);
6113 /* expand_expr: generate code for computing expression EXP.
6114 An rtx for the computed value is returned. The value is never null.
6115 In the case of a void EXP, const0_rtx is returned.
6117 The value may be stored in TARGET if TARGET is nonzero.
6118 TARGET is just a suggestion; callers must assume that
6119 the rtx returned may not be the same as TARGET.
6121 If TARGET is CONST0_RTX, it means that the value will be ignored.
6123 If TMODE is not VOIDmode, it suggests generating the
6124 result in mode TMODE. But this is done only when convenient.
6125 Otherwise, TMODE is ignored and the value generated in its natural mode.
6126 TMODE is just a suggestion; callers must assume that
6127 the rtx returned may not have mode TMODE.
6129 Note that TARGET may have neither TMODE nor MODE. In that case, it
6130 probably will not be used.
6132 If MODIFIER is EXPAND_SUM then when EXP is an addition
6133 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6134 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6135 products as above, or REG or MEM, or constant.
6136 Ordinarily in such cases we would output mul or add instructions
6137 and then return a pseudo reg containing the sum.
6139 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6140 it also marks a label as absolutely required (it can't be dead).
6141 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6142 This is used for outputting expressions used in initializers.
6144 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6145 with a constant address even if that address is not normally legitimate.
6146 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6148 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6149 a call parameter. Such targets require special care as we haven't yet
6150 marked TARGET so that it's safe from being trashed by libcalls. We
6151 don't want to use TARGET for anything but the final result;
6152 Intermediate values must go elsewhere. Additionally, calls to
6153 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6155 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6156 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6157 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6158 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6161 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6162 enum expand_modifier, rtx *);
6165 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6166 enum expand_modifier modifier, rtx *alt_rtl)
6169 rtx ret, last = NULL;
6171 /* Handle ERROR_MARK before anybody tries to access its type. */
6172 if (TREE_CODE (exp) == ERROR_MARK
6173 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6175 ret = CONST0_RTX (tmode);
6176 return ret ? ret : const0_rtx;
6179 if (flag_non_call_exceptions)
6181 rn = lookup_stmt_eh_region (exp);
6182 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6184 last = get_last_insn ();
6187 /* If this is an expression of some kind and it has an associated line
6188 number, then emit the line number before expanding the expression.
6190 We need to save and restore the file and line information so that
6191 errors discovered during expansion are emitted with the right
6192 information. It would be better of the diagnostic routines
6193 used the file/line information embedded in the tree nodes rather
6195 if (cfun && EXPR_HAS_LOCATION (exp))
6197 location_t saved_location = input_location;
6198 input_location = EXPR_LOCATION (exp);
6199 emit_line_note (input_location);
6201 /* Record where the insns produced belong. */
6202 record_block_change (TREE_BLOCK (exp));
6204 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6206 input_location = saved_location;
6210 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6213 /* If using non-call exceptions, mark all insns that may trap.
6214 expand_call() will mark CALL_INSNs before we get to this code,
6215 but it doesn't handle libcalls, and these may trap. */
6219 for (insn = next_real_insn (last); insn;
6220 insn = next_real_insn (insn))
6222 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6223 /* If we want exceptions for non-call insns, any
6224 may_trap_p instruction may throw. */
6225 && GET_CODE (PATTERN (insn)) != CLOBBER
6226 && GET_CODE (PATTERN (insn)) != USE
6227 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6229 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn), REG_NOTES (insn));
6239 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6240 enum expand_modifier modifier, rtx *alt_rtl)
6243 tree type = TREE_TYPE (exp);
6245 enum machine_mode mode;
6246 enum tree_code code = TREE_CODE (exp);
6248 rtx subtarget, original_target;
6251 bool reduce_bit_field = false;
6252 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6253 ? reduce_to_bit_field_precision ((expr), target, type) : (expr))
6258 mode = TYPE_MODE (type);
6259 unsignedp = TYPE_UNSIGNED (type);
6260 if (lang_hooks.reduce_bit_field_operations
6261 && TREE_CODE (type) == INTEGER_TYPE
6262 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6264 /* An operation in what may be a bit-field type needs the
6265 result to be reduced to the precision of the bit-field type,
6266 which is narrower than that of the type's mode. */
6267 reduce_bit_field = true;
6268 if (modifier == EXPAND_STACK_PARM) target = 0;
6272 /* Use subtarget as the target for operand 0 of a binary operation. */
6273 subtarget = get_subtarget (target);
6274 original_target = target;
6275 ignore = (target == const0_rtx
6276 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6277 || code == CONVERT_EXPR || code == COND_EXPR
6278 || code == VIEW_CONVERT_EXPR)
6279 && TREE_CODE (type) == VOID_TYPE));
6281 /* If we are going to ignore this result, we need only do something
6282 if there is a side-effect somewhere in the expression. If there
6283 is, short-circuit the most common cases here. Note that we must
6284 not call expand_expr with anything but const0_rtx in case this
6285 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6289 if (! TREE_SIDE_EFFECTS (exp))
6292 /* Ensure we reference a volatile object even if value is ignored, but
6293 don't do this if all we are doing is taking its address. */
6294 if (TREE_THIS_VOLATILE (exp)
6295 && TREE_CODE (exp) != FUNCTION_DECL
6296 && mode != VOIDmode && mode != BLKmode
6297 && modifier != EXPAND_CONST_ADDRESS)
6299 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6301 temp = copy_to_reg (temp);
6305 if (TREE_CODE_CLASS (code) == tcc_unary
6306 || code == COMPONENT_REF || code == INDIRECT_REF)
6307 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6310 else if (TREE_CODE_CLASS (code) == tcc_binary
6311 || TREE_CODE_CLASS (code) == tcc_comparison
6312 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6314 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6315 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6318 else if (code == BIT_FIELD_REF)
6320 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6321 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6322 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6329 /* If we will do cse, generate all results into pseudo registers
6330 since 1) that allows cse to find more things
6331 and 2) otherwise cse could produce an insn the machine
6332 cannot support. An exception is a CONSTRUCTOR into a multi-word
6333 MEM: that's much more likely to be most efficient into the MEM.
6334 Another is a CALL_EXPR which must return in memory. */
6336 if (! cse_not_expected && mode != BLKmode && target
6337 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6338 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6339 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6346 tree function = decl_function_context (exp);
6348 temp = label_rtx (exp);
6349 temp = gen_rtx_LABEL_REF (Pmode, temp);
6351 if (function != current_function_decl && function != 0)
6353 LABEL_REF_NONLOCAL_P (temp) = 1;
6355 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6360 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6365 /* If a static var's type was incomplete when the decl was written,
6366 but the type is complete now, lay out the decl now. */
6367 if (DECL_SIZE (exp) == 0
6368 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6369 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6370 layout_decl (exp, 0);
6372 /* ... fall through ... */
6376 gcc_assert (DECL_RTL (exp));
6378 /* Ensure the variable is marked as used even if it doesn't go through
6379 a parser. If it hasn't been used yet, write out an external definition. */
6381 if (! TREE_USED (exp))
6383 assemble_external (exp);
6384 TREE_USED (exp) = 1;
6387 /* Show we haven't gotten RTL for this yet. */
6390 /* Variables inherited from containing functions should have
6391 been lowered by this point. */
6392 context = decl_function_context (exp);
6393 gcc_assert (!context
6394 || context == current_function_decl
6395 || TREE_STATIC (exp)
6396 /* ??? C++ creates functions that are not TREE_STATIC. */
6397 || TREE_CODE (exp) == FUNCTION_DECL);
6399 /* This is the case of an array whose size is to be determined
6400 from its initializer, while the initializer is still being parsed.
6403 if (MEM_P (DECL_RTL (exp))
6404 && REG_P (XEXP (DECL_RTL (exp), 0)))
6405 temp = validize_mem (DECL_RTL (exp));
6407 /* If DECL_RTL is memory, we are in the normal case and either
6408 the address is not valid or it is not a register and -fforce-addr
6409 is specified, get the address into a register. */
6411 else if (MEM_P (DECL_RTL (exp))
6412 && modifier != EXPAND_CONST_ADDRESS
6413 && modifier != EXPAND_SUM
6414 && modifier != EXPAND_INITIALIZER
6415 && (! memory_address_p (DECL_MODE (exp),
6416 XEXP (DECL_RTL (exp), 0))
6418 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6421 if (alt_rtl) *alt_rtl = DECL_RTL (exp);
6422 temp = replace_equiv_address (DECL_RTL (exp),
6423 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6426 /* If we got something, return it. But first, set the alignment
6427 if the address is a register. */
6430 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6431 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6436 /* If the mode of DECL_RTL does not match that of the decl, it
6437 must be a promoted value. We return a SUBREG of the wanted mode,
6438 but mark it so that we know that it was already extended. */
6440 if (REG_P (DECL_RTL (exp))
6441 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6443 enum machine_mode pmode;
6445 /* Get the signedness used for this variable. Ensure we get the
6446 same mode we got when the variable was declared. */
6447 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6448 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6449 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6451 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6452 SUBREG_PROMOTED_VAR_P (temp) = 1;
6453 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6457 return DECL_RTL (exp);
6460 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6461 TREE_INT_CST_HIGH (exp), mode);
6463 /* ??? If overflow is set, fold will have done an incomplete job,
6464 which can result in (plus xx (const_int 0)), which can get
6465 simplified by validate_replace_rtx during virtual register
6466 instantiation, which can result in unrecognizable insns.
6467 Avoid this by forcing all overflows into registers. */
6468 if (TREE_CONSTANT_OVERFLOW (exp)
6469 && modifier != EXPAND_INITIALIZER)
6470 temp = force_reg (mode, temp);
6475 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6476 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6477 return const_vector_from_tree (exp);
6479 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6480 TREE_VECTOR_CST_ELTS (exp)),
6481 ignore ? const0_rtx : target, tmode, modifier);
6484 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6487 /* If optimized, generate immediate CONST_DOUBLE
6488 which will be turned into memory by reload if necessary.
6490 We used to force a register so that loop.c could see it. But
6491 this does not allow gen_* patterns to perform optimizations with
6492 the constants. It also produces two insns in cases like "x = 1.0;".
6493 On most machines, floating-point constants are not permitted in
6494 many insns, so we'd end up copying it to a register in any case.
6496 Now, we do the copying in expand_binop, if appropriate. */
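      /* Illustrative sketch: for "x = 1.0;" the value returned here is
	 the bare immediate (const_double 1.0) in the type's mode; no
	 move insn is emitted, and any copy to a register or to the
	 constant pool happens later, only if the consuming pattern
	 actually requires it.  */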
6497 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6498 TYPE_MODE (TREE_TYPE (exp)));
6501 /* Handle evaluating a complex constant in a CONCAT target. */
6502 if (original_target && GET_CODE (original_target) == CONCAT)
6504 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6507 rtarg = XEXP (original_target, 0);
6508 itarg = XEXP (original_target, 1);
6510 /* Move the real and imaginary parts separately. */
6511 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6512 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6515 emit_move_insn (rtarg, op0);
6517 emit_move_insn (itarg, op1);
6519 return original_target;
6522 /* ... fall through ... */
6525 temp = output_constant_def (exp, 1);
6527 /* temp contains a constant address.
6528 On RISC machines where a constant address isn't valid,
6529 make some insns to get that address into a register. */
6530 if (modifier != EXPAND_CONST_ADDRESS
6531 && modifier != EXPAND_INITIALIZER
6532 && modifier != EXPAND_SUM
6533 && (! memory_address_p (mode, XEXP (temp, 0))
6534 || flag_force_addr))
6535 return replace_equiv_address (temp,
6536 copy_rtx (XEXP (temp, 0)));
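      /* Illustrative sketch: on a RISC machine where
	 (mem (symbol_ref ("x"))) is not a legitimate address, the call
	 above rewrites the reference as (mem (reg)) after emitting the
	 insns that load the symbol's address into a pseudo.  */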
6541 tree val = TREE_OPERAND (exp, 0);
6542 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6544 if (!SAVE_EXPR_RESOLVED_P (exp))
6546 /* We can indeed still hit this case, typically via builtin
6547 expanders calling save_expr immediately before expanding
6548 something. Assume this means that we only have to deal
6549 with non-BLKmode values. */
6550 gcc_assert (GET_MODE (ret) != BLKmode);
6552 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6553 DECL_ARTIFICIAL (val) = 1;
6554 DECL_IGNORED_P (val) = 1;
6555 TREE_OPERAND (exp, 0) = val;
6556 SAVE_EXPR_RESOLVED_P (exp) = 1;
6558 if (!CONSTANT_P (ret))
6559 ret = copy_to_reg (ret);
6560 SET_DECL_RTL (val, ret);
6567 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6568 expand_goto (TREE_OPERAND (exp, 0));
6570 expand_computed_goto (TREE_OPERAND (exp, 0));
6574 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6580 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6581 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6586 /* All elts simple constants => refer to a constant in memory. But
6587 if this is a non-BLKmode mode, let it store a field at a time
6588 since that should make a CONST_INT or CONST_DOUBLE when we
6589 fold. Likewise, if we have a target we can use, it is best to
6590 store directly into the target unless the type is large enough
6591 that memcpy will be used. If we are making an initializer and
6592 all operands are constant, put it in memory as well.
6594 FIXME: Avoid trying to fill vector constructors piece-meal.
6595 Output them with output_constant_def below unless we're sure
6596 they're zeros. This should go away when vector initializers
6597 are treated like VECTOR_CST instead of arrays.
6599 else if ((TREE_STATIC (exp)
6600 && ((mode == BLKmode
6601 && ! (target != 0 && safe_from_p (target, exp, 1)))
6602 || TREE_ADDRESSABLE (exp)
6603 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6604 && (! MOVE_BY_PIECES_P
6605 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6607 && ! mostly_zeros_p (exp))))
6608 || ((modifier == EXPAND_INITIALIZER
6609 || modifier == EXPAND_CONST_ADDRESS)
6610 && TREE_CONSTANT (exp)))
6612 rtx constructor = output_constant_def (exp, 1);
6614 if (modifier != EXPAND_CONST_ADDRESS
6615 && modifier != EXPAND_INITIALIZER
6616 && modifier != EXPAND_SUM)
6617 constructor = validize_mem (constructor);
6623 /* Handle calls that pass values in multiple non-contiguous
6624 locations. The Irix 6 ABI has examples of this. */
6625 if (target == 0 || ! safe_from_p (target, exp, 1)
6626 || GET_CODE (target) == PARALLEL
6627 || modifier == EXPAND_STACK_PARM)
6629 = assign_temp (build_qualified_type (type,
6631 | (TREE_READONLY (exp)
6632 * TYPE_QUAL_CONST))),
6633 0, TREE_ADDRESSABLE (exp), 1);
6635 store_constructor (exp, target, 0, int_expr_size (exp));
6639 case MISALIGNED_INDIRECT_REF:
6640 case ALIGN_INDIRECT_REF:
6643 tree exp1 = TREE_OPERAND (exp, 0);
6646 if (code == MISALIGNED_INDIRECT_REF
6647 && !targetm.vectorize.misaligned_mem_ok (mode))
6650 if (modifier != EXPAND_WRITE)
6654 t = fold_read_from_constant_string (exp);
6656 return expand_expr (t, target, tmode, modifier);
6659 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6660 op0 = memory_address (mode, op0);
6662 if (code == ALIGN_INDIRECT_REF)
6664 int align = TYPE_ALIGN_UNIT (type);
6665 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6666 op0 = memory_address (mode, op0);
6669 temp = gen_rtx_MEM (mode, op0);
6671 orig = REF_ORIGINAL (exp);
6674 set_mem_attributes (temp, orig, 0);
6682 tree array = TREE_OPERAND (exp, 0);
6683 tree index = TREE_OPERAND (exp, 1);
6685 /* Fold an expression like: "foo"[2].
6686 This is not done in fold so it won't happen inside &.
6687 Don't fold if this is for wide characters since it's too
6688 difficult to do correctly and this is a very rare case. */
6690 if (modifier != EXPAND_CONST_ADDRESS
6691 && modifier != EXPAND_INITIALIZER
6692 && modifier != EXPAND_MEMORY)
6694 tree t = fold_read_from_constant_string (exp);
6697 return expand_expr (t, target, tmode, modifier);
6700 /* If this is a constant index into a constant array,
6701 just get the value from the array. Handle both the cases when
6702 we have an explicit constructor and when our operand is a variable
6703 that was declared const. */
6705 if (modifier != EXPAND_CONST_ADDRESS
6706 && modifier != EXPAND_INITIALIZER
6707 && modifier != EXPAND_MEMORY
6708 && TREE_CODE (array) == CONSTRUCTOR
6709 && ! TREE_SIDE_EFFECTS (array)
6710 && TREE_CODE (index) == INTEGER_CST)
6714 for (elem = CONSTRUCTOR_ELTS (array);
6715 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6716 elem = TREE_CHAIN (elem))
6719 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6720 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6724 else if (optimize >= 1
6725 && modifier != EXPAND_CONST_ADDRESS
6726 && modifier != EXPAND_INITIALIZER
6727 && modifier != EXPAND_MEMORY
6728 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6729 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6730 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6731 && targetm.binds_local_p (array))
6733 if (TREE_CODE (index) == INTEGER_CST)
6735 tree init = DECL_INITIAL (array);
6737 if (TREE_CODE (init) == CONSTRUCTOR)
6741 for (elem = CONSTRUCTOR_ELTS (init);
6743 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6744 elem = TREE_CHAIN (elem))
6747 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6748 return expand_expr (fold (TREE_VALUE (elem)), target,
6751 else if (TREE_CODE (init) == STRING_CST
6752 && 0 > compare_tree_int (index,
6753 TREE_STRING_LENGTH (init)))
6755 tree type = TREE_TYPE (TREE_TYPE (init));
6756 enum machine_mode mode = TYPE_MODE (type);
6758 if (GET_MODE_CLASS (mode) == MODE_INT
6759 && GET_MODE_SIZE (mode) == 1)
6760 return gen_int_mode (TREE_STRING_POINTER (init)
6761 [TREE_INT_CST_LOW (index)], mode);
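		  /* Illustrative sketch: for a read such as buf[2] where
		     static const char buf[] = "foo";, the code above
		     returns (const_int 111) -- the character 'o' --
		     without generating any memory reference at all.  */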
6766 goto normal_inner_ref;
6769 /* If the operand is a CONSTRUCTOR, we can just extract the
6770 appropriate field if it is present. */
6771 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6775 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6776 elt = TREE_CHAIN (elt))
6777 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6778 /* We can normally use the value of the field in the
6779 CONSTRUCTOR. However, if this is a bitfield in
6780 an integral mode that we can fit in a HOST_WIDE_INT,
6781 we must mask only the number of bits in the bitfield,
6782 since this is done implicitly by the constructor. If
6783 the bitfield does not meet either of those conditions,
6784 we can't do this optimization. */
6785 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6786 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6788 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6789 <= HOST_BITS_PER_WIDE_INT))))
6791 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6792 && modifier == EXPAND_STACK_PARM)
6794 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6795 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6797 HOST_WIDE_INT bitsize
6798 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6799 enum machine_mode imode
6800 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6802 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6804 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6805 op0 = expand_and (imode, op0, op1, target);
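		    /* Illustrative sketch: for an unsigned bit-field of
		       3 bits, BITSIZE is 3 and OP1 is (const_int 7), so
		       the AND above keeps only the low three bits.  The
		       signed case below instead shifts left and then
		       arithmetically right by the same amount, so the
		       field's sign bit is propagated.  */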
6810 = build_int_cst (NULL_TREE,
6811 GET_MODE_BITSIZE (imode) - bitsize);
6813 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6815 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6823 goto normal_inner_ref;
6826 case ARRAY_RANGE_REF:
6829 enum machine_mode mode1;
6830 HOST_WIDE_INT bitsize, bitpos;
6833 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6834 &mode1, &unsignedp, &volatilep);
6837 /* If we got back the original object, something is wrong. Perhaps
6838 we are evaluating an expression too early. In any event, don't
6839 infinitely recurse. */
6840 gcc_assert (tem != exp);
6842 /* If TEM's type is a union of variable size, pass TARGET to the inner
6843 computation, since it will need a temporary and TARGET is known
6844 to be safe to use. This occurs in unchecked conversion in Ada. */
6848 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6849 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6851 && modifier != EXPAND_STACK_PARM
6852 ? target : NULL_RTX),
6854 (modifier == EXPAND_INITIALIZER
6855 || modifier == EXPAND_CONST_ADDRESS
6856 || modifier == EXPAND_STACK_PARM)
6857 ? modifier : EXPAND_NORMAL);
6859 /* If this is a constant, put it into a register if it is a
6860 legitimate constant and OFFSET is 0; otherwise put it into memory. */
6861 if (CONSTANT_P (op0))
6863 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6864 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6866 op0 = force_reg (mode, op0);
6868 op0 = validize_mem (force_const_mem (mode, op0));
6871 /* Otherwise, if this object is not in memory and we either have an
6872 offset or a BLKmode result, put it there. This case can't occur in
6873 C, but can in Ada if we have unchecked conversion of an expression
6874 from a scalar type to an array or record type or for an
6875 ARRAY_RANGE_REF whose type is BLKmode. */
6876 else if (!MEM_P (op0)
6878 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6880 tree nt = build_qualified_type (TREE_TYPE (tem),
6881 (TYPE_QUALS (TREE_TYPE (tem))
6882 | TYPE_QUAL_CONST));
6883 rtx memloc = assign_temp (nt, 1, 1, 1);
6885 emit_move_insn (memloc, op0);
6891 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6894 gcc_assert (MEM_P (op0));
6896 #ifdef POINTERS_EXTEND_UNSIGNED
6897 if (GET_MODE (offset_rtx) != Pmode)
6898 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6900 if (GET_MODE (offset_rtx) != ptr_mode)
6901 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6904 if (GET_MODE (op0) == BLKmode
6905 /* A constant address in OP0 can have VOIDmode, so we must
6906 not try to call force_reg in that case. */
6907 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6909 && (bitpos % bitsize) == 0
6910 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6911 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6913 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6917 op0 = offset_address (op0, offset_rtx,
6918 highest_pow2_factor (offset));
6921 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6922 record its alignment as BIGGEST_ALIGNMENT. */
6923 if (MEM_P (op0) && bitpos == 0 && offset != 0
6924 && is_aligning_offset (offset, tem))
6925 set_mem_align (op0, BIGGEST_ALIGNMENT);
6927 /* Don't forget about volatility even if this is a bitfield. */
6928 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6930 if (op0 == orig_op0)
6931 op0 = copy_rtx (op0);
6933 MEM_VOLATILE_P (op0) = 1;
6936 /* The following code doesn't handle CONCAT.
6937 Assume only bitpos == 0 can be used for CONCAT, due to
6938 one-element arrays having the same mode as their element. */
6939 if (GET_CODE (op0) == CONCAT)
6941 gcc_assert (bitpos == 0
6942 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
6946 /* In cases where an aligned union has an unaligned object
6947 as a field, we might be extracting a BLKmode value from
6948 an integer-mode (e.g., SImode) object. Handle this case
6949 by doing the extract into an object as wide as the field
6950 (which we know to be the width of a basic mode), then
6951 storing into memory, and changing the mode to BLKmode. */
6952 if (mode1 == VOIDmode
6953 || REG_P (op0) || GET_CODE (op0) == SUBREG
6954 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6955 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6956 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6957 && modifier != EXPAND_CONST_ADDRESS
6958 && modifier != EXPAND_INITIALIZER)
6959 /* If the field isn't aligned enough to fetch as a memref,
6960 fetch it as a bit field. */
6961 || (mode1 != BLKmode
6962 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6963 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6965 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6966 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6967 && ((modifier == EXPAND_CONST_ADDRESS
6968 || modifier == EXPAND_INITIALIZER)
6970 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6971 || (bitpos % BITS_PER_UNIT != 0)))
6972 /* If the type and the field are a constant size and the
6973 size of the type isn't the same size as the bitfield,
6974 we must use bitfield operations. */
6976 && TYPE_SIZE (TREE_TYPE (exp))
6977 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6978 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6981 enum machine_mode ext_mode = mode;
6983 if (ext_mode == BLKmode
6984 && ! (target != 0 && MEM_P (op0)
6986 && bitpos % BITS_PER_UNIT == 0))
6987 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6989 if (ext_mode == BLKmode)
6992 target = assign_temp (type, 0, 1, 1);
6997 /* In this case, BITPOS must start at a byte boundary and
6998 TARGET, if specified, must be a MEM. */
6999 gcc_assert (MEM_P (op0)
7000 && (!target || MEM_P (target))
7001 && !(bitpos % BITS_PER_UNIT));
7003 emit_block_move (target,
7004 adjust_address (op0, VOIDmode,
7005 bitpos / BITS_PER_UNIT),
7006 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7008 (modifier == EXPAND_STACK_PARM
7009 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7014 op0 = validize_mem (op0);
7016 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7017 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7019 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7020 (modifier == EXPAND_STACK_PARM
7021 ? NULL_RTX : target),
7022 ext_mode, ext_mode);
7024 /* If the result is a record type and BITSIZE is narrower than
7025 the mode of OP0, an integral mode, and this is a big endian
7026 machine, we must put the field into the high-order bits. */
7027 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7028 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7029 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7030 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7031 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7035 /* If the result type is BLKmode, store the data into a temporary
7036 of the appropriate type, but with the mode corresponding to the
7037 mode for the data we have (op0's mode). It's tempting to make
7038 this a constant type, since we know it's only being stored once,
7039 but that can cause problems if we are taking the address of this
7040 COMPONENT_REF because the MEM of any reference via that address
7041 will have flags corresponding to the type, which will not
7042 necessarily be constant. */
7043 if (mode == BLKmode)
7046 = assign_stack_temp_for_type
7047 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7049 emit_move_insn (new, op0);
7050 op0 = copy_rtx (new);
7051 PUT_MODE (op0, BLKmode);
7052 set_mem_attributes (op0, exp, 1);
7058 /* If the result is BLKmode, use that to access the object now as well. */
7060 if (mode == BLKmode)
7063 /* Get a reference to just this component. */
7064 if (modifier == EXPAND_CONST_ADDRESS
7065 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7066 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7068 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7070 if (op0 == orig_op0)
7071 op0 = copy_rtx (op0);
7073 set_mem_attributes (op0, exp, 0);
7074 if (REG_P (XEXP (op0, 0)))
7075 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7077 MEM_VOLATILE_P (op0) |= volatilep;
7078 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7079 || modifier == EXPAND_CONST_ADDRESS
7080 || modifier == EXPAND_INITIALIZER)
7082 else if (target == 0)
7083 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7085 convert_move (target, op0, unsignedp);
7090 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7093 /* Check for a built-in function. */
7094 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7095 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7097 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7099 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7100 == BUILT_IN_FRONTEND)
7101 return lang_hooks.expand_expr (exp, original_target,
7105 return expand_builtin (exp, target, subtarget, tmode, ignore);
7108 return expand_call (exp, target, ignore);
7110 case NON_LVALUE_EXPR:
7113 if (TREE_OPERAND (exp, 0) == error_mark_node)
7116 if (TREE_CODE (type) == UNION_TYPE)
7118 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7120 /* If both input and output are BLKmode, this conversion isn't doing
7121 anything except possibly changing memory attribute. */
7122 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7124 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7127 result = copy_rtx (result);
7128 set_mem_attributes (result, exp, 0);
7134 if (TYPE_MODE (type) != BLKmode)
7135 target = gen_reg_rtx (TYPE_MODE (type));
7137 target = assign_temp (type, 0, 1, 1);
7141 /* Store data into beginning of memory target. */
7142 store_expr (TREE_OPERAND (exp, 0),
7143 adjust_address (target, TYPE_MODE (valtype), 0),
7144 modifier == EXPAND_STACK_PARM);
7148 gcc_assert (REG_P (target));
7150 /* Store this field into a union of the proper type. */
7151 store_field (target,
7152 MIN ((int_size_in_bytes (TREE_TYPE
7153 (TREE_OPERAND (exp, 0)))
7155 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7156 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7160 /* Return the entire union. */
7164 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7166 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7169 /* If the signedness of the conversion differs and OP0 is
7170 a promoted SUBREG, clear that indication since we now
7171 have to do the proper extension. */
7172 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7173 && GET_CODE (op0) == SUBREG)
7174 SUBREG_PROMOTED_VAR_P (op0) = 0;
7176 return REDUCE_BIT_FIELD (op0);
7179 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7180 if (GET_MODE (op0) == mode)
7183 /* If OP0 is a constant, just convert it into the proper mode. */
7184 else if (CONSTANT_P (op0))
7186 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7187 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7189 if (modifier == EXPAND_INITIALIZER)
7190 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7191 subreg_lowpart_offset (mode,
7194 op0 = convert_modes (mode, inner_mode, op0,
7195 TYPE_UNSIGNED (inner_type));
7198 else if (modifier == EXPAND_INITIALIZER)
7199 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7201 else if (target == 0)
7202 op0 = convert_to_mode (mode, op0,
7203 TYPE_UNSIGNED (TREE_TYPE
7204 (TREE_OPERAND (exp, 0))));
7207 convert_move (target, op0,
7208 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7212 return REDUCE_BIT_FIELD (op0);
7214 case VIEW_CONVERT_EXPR:
7215 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7217 /* If the input and output modes are both the same, we are done.
7218 Otherwise, if neither mode is BLKmode and both are integral and within
7219 a word, we can use gen_lowpart. If neither is true, make sure the
7220 operand is in memory and convert the MEM to the new mode. */
7221 if (TYPE_MODE (type) == GET_MODE (op0))
7223 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7224 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7225 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7226 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7227 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7228 op0 = gen_lowpart (TYPE_MODE (type), op0);
7229 else if (!MEM_P (op0))
7231 /* If the operand is not a MEM, force it into memory. Since we
7232 are going to be changing the mode of the MEM, don't call
7233 force_const_mem for constants because we don't allow pool
7234 constants to change mode. */
7235 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7237 gcc_assert (!TREE_ADDRESSABLE (exp));
7239 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7241 = assign_stack_temp_for_type
7242 (TYPE_MODE (inner_type),
7243 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7245 emit_move_insn (target, op0);
7249 /* At this point, OP0 is in the correct mode. If the output type is such
7250 that the operand is known to be aligned, indicate that it is.
7251 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7255 op0 = copy_rtx (op0);
7257 if (TYPE_ALIGN_OK (type))
7258 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7259 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7260 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7262 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7263 HOST_WIDE_INT temp_size
7264 = MAX (int_size_in_bytes (inner_type),
7265 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7266 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7267 temp_size, 0, type);
7268 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7270 gcc_assert (!TREE_ADDRESSABLE (exp));
7272 if (GET_MODE (op0) == BLKmode)
7273 emit_block_move (new_with_op0_mode, op0,
7274 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7275 (modifier == EXPAND_STACK_PARM
7276 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7278 emit_move_insn (new_with_op0_mode, op0);
7283 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7289 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7290 something else, make sure we add the register to the constant and
7291 then to the other thing. This case can occur during strength
7292 reduction and doing it this way will produce better code if the
7293 frame pointer or argument pointer is eliminated.
7295 fold-const.c will ensure that the constant is always in the inner
7296 PLUS_EXPR, so the only case we need to do anything about is if
7297 sp, ap, or fp is our second argument, in which case we must swap
7298 the innermost first argument and our second argument. */
7300 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7301 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7302 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7303 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7304 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7305 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7307 tree t = TREE_OPERAND (exp, 1);
7309 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7310 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7313 /* If the result is to be ptr_mode and we are adding an integer to
7314 something, we might be forming a constant. So try to use
7315 plus_constant. If it produces a sum and we can't accept it,
7316 use force_operand. This allows P = &ARR[const] to generate
7317 efficient code on machines where a SYMBOL_REF is not a valid address.
7320 If this is an EXPAND_SUM call, always return the sum. */
7321 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7322 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7324 if (modifier == EXPAND_STACK_PARM) target = 0;
7326 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7327 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7328 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7332 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7334 /* Use immed_double_const to ensure that the constant is
7335 truncated according to the mode of OP1, then sign extended
7336 to a HOST_WIDE_INT. Using the constant directly can result
7337 in non-canonical RTL in a 64x32 cross compile. */
7339 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7341 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7342 op1 = plus_constant (op1, INTVAL (constant_part));
7343 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7344 op1 = force_operand (op1, target);
7345 return REDUCE_BIT_FIELD (op1);
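	      /* Illustrative sketch of why immed_double_const matters
		 here: in a cross compile from a 64-bit host to a 32-bit
		 target, a low part of 0xffffffff must be canonicalized
		 to (const_int -1) for SImode; using the host value
		 unchanged would produce non-canonical RTL.  */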
7348 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7349 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7350 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7354 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7355 (modifier == EXPAND_INITIALIZER
7356 ? EXPAND_INITIALIZER : EXPAND_SUM));
7357 if (! CONSTANT_P (op0))
7359 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7360 VOIDmode, modifier);
7361 /* Return a PLUS if modifier says it's OK. */
7362 if (modifier == EXPAND_SUM
7363 || modifier == EXPAND_INITIALIZER)
7364 return simplify_gen_binary (PLUS, mode, op0, op1);
7367 /* Use immed_double_const to ensure that the constant is
7368 truncated according to the mode of OP0, then sign extended
7369 to a HOST_WIDE_INT. Using the constant directly can result
7370 in non-canonical RTL in a 64x32 cross compile. */
7372 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7374 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7375 op0 = plus_constant (op0, INTVAL (constant_part));
7376 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7377 op0 = force_operand (op0, target);
7378 return REDUCE_BIT_FIELD (op0);
7382 /* No sense saving up arithmetic to be done
7383 if it's all in the wrong mode to form part of an address.
7384 And force_operand won't know whether to sign-extend or zero-extend. */
7386 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7387 || mode != ptr_mode)
7389 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7390 subtarget, &op0, &op1, 0);
7391 if (op0 == const0_rtx)
7393 if (op1 == const0_rtx)
7398 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7399 subtarget, &op0, &op1, modifier);
7400 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7403 /* For initializers, we are allowed to return a MINUS of two
7404 symbolic constants. Here we handle all cases when both operands are constant. */
7406 /* Handle difference of two symbolic constants,
7407 for the sake of an initializer. */
7408 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7409 && really_constant_p (TREE_OPERAND (exp, 0))
7410 && really_constant_p (TREE_OPERAND (exp, 1)))
7412 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7413 NULL_RTX, &op0, &op1, modifier);
7415 /* If the last operand is a CONST_INT, use plus_constant of
7416 the negated constant. Else make the MINUS. */
7417 if (GET_CODE (op1) == CONST_INT)
7418 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7420 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7423 /* No sense saving up arithmetic to be done
7424 if it's all in the wrong mode to form part of an address.
7425 And force_operand won't know whether to sign-extend or zero-extend. */
7427 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7428 || mode != ptr_mode)
7431 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7432 subtarget, &op0, &op1, modifier);
7434 /* Convert A - const to A + (-const). */
7435 if (GET_CODE (op1) == CONST_INT)
7437 op1 = negate_rtx (mode, op1);
7438 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7444 /* If first operand is constant, swap them.
7445 Thus the following special case checks need only
7446 check the second operand. */
7447 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7449 tree t1 = TREE_OPERAND (exp, 0);
7450 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7451 TREE_OPERAND (exp, 1) = t1;
7454 /* Attempt to return something suitable for generating an
7455 indexed address, for machines that support that. */
7457 if (modifier == EXPAND_SUM && mode == ptr_mode
7458 && host_integerp (TREE_OPERAND (exp, 1), 0))
7460 tree exp1 = TREE_OPERAND (exp, 1);
7462 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7466 op0 = force_operand (op0, NULL_RTX);
7468 op0 = copy_to_mode_reg (mode, op0);
7470 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7471 gen_int_mode (tree_low_cst (exp1, 0),
7472 TYPE_MODE (TREE_TYPE (exp1)))));
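	  /* Illustrative sketch: for I * 4 under EXPAND_SUM this returns
	     the bare (mult:SI (reg:SI 60) (const_int 4)), so that a
	     caller building an address can fold it into an indexed
	     addressing mode such as
	     (plus (reg) (mult (reg) (const_int 4))).  */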
7475 if (modifier == EXPAND_STACK_PARM) target = 0;
7478 /* Check for multiplying things that have been extended
7479 from a narrower type. If this machine supports multiplying
7480 in that narrower type with a result in the desired type,
7481 do it that way, and avoid the explicit type-conversion. */
7482 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7483 && TREE_CODE (type) == INTEGER_TYPE
7484 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7485 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7486 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7487 && int_fits_type_p (TREE_OPERAND (exp, 1),
7488 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7489 /* Don't use a widening multiply if a shift will do. */
7490 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7491 > HOST_BITS_PER_WIDE_INT)
7492 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7494 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7495 && (TYPE_PRECISION (TREE_TYPE
7496 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7497 == TYPE_PRECISION (TREE_TYPE
7499 (TREE_OPERAND (exp, 0), 0))))
7500 /* If both operands are extended, they must either both
7501 be zero-extended or both be sign-extended. */
7502 && (TYPE_UNSIGNED (TREE_TYPE
7503 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7504 == TYPE_UNSIGNED (TREE_TYPE
7506 (TREE_OPERAND (exp, 0), 0)))))))
7508 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7509 enum machine_mode innermode = TYPE_MODE (op0type);
7510 bool zextend_p = TYPE_UNSIGNED (op0type);
7511 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7512 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7514 if (mode == GET_MODE_WIDER_MODE (innermode))
7516 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7518 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7519 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7520 TREE_OPERAND (exp, 1),
7521 NULL_RTX, &op0, &op1, 0);
7523 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7524 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7525 NULL_RTX, &op0, &op1, 0);
7528 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7529 && innermode == word_mode)
7532 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7533 NULL_RTX, VOIDmode, 0);
7534 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7535 op1 = convert_modes (innermode, mode,
7536 expand_expr (TREE_OPERAND (exp, 1),
7537 NULL_RTX, VOIDmode, 0),
7540 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7541 NULL_RTX, VOIDmode, 0);
7542 temp = expand_binop (mode, other_optab, op0, op1, target,
7543 unsignedp, OPTAB_LIB_WIDEN);
7544 hipart = gen_highpart (innermode, temp);
7545 htem = expand_mult_highpart_adjust (innermode, hipart,
7549 emit_move_insn (hipart, htem);
7550 return REDUCE_BIT_FIELD (temp);
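	      /* Illustrative sketch: for (int) (short) a * (int) (short) b
		 on a machine providing a HImode-to-SImode widening
		 multiply, the operands are expanded in HImode and one
		 mulhisi3-style insn yields the SImode product, instead
		 of two extensions followed by a full SImode multiply.  */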
7554 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7555 subtarget, &op0, &op1, 0);
7556 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7558 case TRUNC_DIV_EXPR:
7559 case FLOOR_DIV_EXPR:
7561 case ROUND_DIV_EXPR:
7562 case EXACT_DIV_EXPR:
7563 if (modifier == EXPAND_STACK_PARM) target = 0;
7565 /* Possible optimization: compute the dividend with EXPAND_SUM
7566 then, if the divisor is constant, optimize the case
7567 where some terms of the dividend have coefficients divisible by it. */
7568 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7569 subtarget, &op0, &op1, 0);
7570 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7573 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7574 saving an expensive divide. If not, combine will rebuild the original computation. */
7576 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7577 && TREE_CODE (type) == REAL_TYPE
7578 && !real_onep (TREE_OPERAND (exp, 0)))
7579 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7580 build2 (RDIV_EXPR, type,
7581 build_real (type, dconst1),
7582 TREE_OPERAND (exp, 1))),
7583 target, tmode, modifier);
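      /* Illustrative sketch: under this transformation a/b and c/b both
	 become multiplications by (1/b); if CSE later unifies the two
	 reciprocal computations, only a single division remains.  */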
7587 case TRUNC_MOD_EXPR:
7588 case FLOOR_MOD_EXPR:
7590 case ROUND_MOD_EXPR:
7591 if (modifier == EXPAND_STACK_PARM) target = 0;
7593 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7594 subtarget, &op0, &op1, 0);
7595 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7597 case FIX_ROUND_EXPR:
7598 case FIX_FLOOR_EXPR:
7600 gcc_unreachable (); /* Not used for C. */
7602 case FIX_TRUNC_EXPR:
7603 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7604 if (target == 0 || modifier == EXPAND_STACK_PARM)
7605 target = gen_reg_rtx (mode);
7606 expand_fix (target, op0, unsignedp);
7610 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7611 if (target == 0 || modifier == EXPAND_STACK_PARM)
7612 target = gen_reg_rtx (mode);
7613 /* expand_float can't figure out what to do if FROM has VOIDmode.
7614 So give it the correct mode. With -O, cse will optimize this. */
7615 if (GET_MODE (op0) == VOIDmode)
7616 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7618 expand_float (target, op0,
7619 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7623 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7624 if (modifier == EXPAND_STACK_PARM) target = 0;
7626 temp = expand_unop (mode,
7627 optab_for_tree_code (NEGATE_EXPR, type),
7630 return REDUCE_BIT_FIELD (temp);
7633 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7634 if (modifier == EXPAND_STACK_PARM) target = 0;
7637 /* ABS_EXPR is not valid for complex arguments. */
7638 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7639 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7641 /* Unsigned abs is simply the operand. Testing here means we don't
7642 risk generating incorrect code below. */
7643 if (TYPE_UNSIGNED (type))
7646 return expand_abs (mode, op0, target, unsignedp,
7647 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7651 target = original_target;
7653 if (target == 0 || modifier == EXPAND_STACK_PARM
7654 || (MEM_P (target) && MEM_VOLATILE_P (target))
7655 || GET_MODE (target) != mode
7657 || (REG_P (target) && REGNO (target) < FIRST_PSEUDO_REGISTER))
7658 target = gen_reg_rtx (mode);
7659 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7660 target, &op0, &op1, 0);
7662 /* First try to do it with a special MIN or MAX instruction.
7663 If that does not win, use a conditional jump to select the proper value. */
7665 this_optab = optab_for_tree_code (code, type);
7666 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7671 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7674 if (! REG_P (target))
7675 target = gen_reg_rtx (mode);
7677 /* If op1 was placed in target, swap op0 and op1. */
7678 if (target != op0 && target == op1)
7685 /* We generate better code and avoid problems with op1 mentioning
7686 target by forcing op1 into a pseudo if it isn't a constant. */
7687 if (! CONSTANT_P (op1))
7688 op1 = force_reg (mode, op1);
7691 emit_move_insn (target, op0);
7693 op0 = gen_label_rtx ();
7695 /* If this mode is an integer too wide to compare properly,
7696 compare word by word. Rely on cse to optimize constant cases. */
7697 if (GET_MODE_CLASS (mode) == MODE_INT
7698 && ! can_compare_p (GE, mode, ccp_jump))
7700 if (code == MAX_EXPR)
7701 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7704 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7709 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7710 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7712 emit_move_insn (target, op1);
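      /* Illustrative sketch of the fallback sequence built above for
	 MAX_EXPR when no max instruction is available:

	     target = op0;
	     if (target >= op1) goto lab;    (GE for MAX, LE for MIN)
	     target = op1;
	   lab:

	 Forcing OP1 into a pseudo beforehand keeps it from being
	 invalidated by the first move into TARGET.  */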
7717 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7718 if (modifier == EXPAND_STACK_PARM) target = 0;
7720 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7724 /* ??? Can optimize bitwise operations with one arg constant.
7725 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7726 and (a bitwise1 b) bitwise2 b (etc)
7727 but that is probably not worthwhile. */
7729 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7730 boolean values when we want in all cases to compute both of them. In
7731 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7732 as actual zero-or-1 values and then bitwise anding. In cases where
7733 there cannot be any side effects, better code would be made by
7734 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7735 how to recognize those cases. */
7737 case TRUTH_AND_EXPR:
7738 code = BIT_AND_EXPR;
7743 code = BIT_IOR_EXPR;
7747 case TRUTH_XOR_EXPR:
7748 code = BIT_XOR_EXPR;
7756 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1)) subtarget = 0;
7758 if (modifier == EXPAND_STACK_PARM) target = 0;
7760 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7761 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7764 /* Could determine the answer when only additive constants differ. Also,
7765 the addition of one can be handled by changing the condition. */
7772 case UNORDERED_EXPR:
7780 temp = do_store_flag (exp,
7781 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7782 tmode != VOIDmode ? tmode : mode, 0);
7786 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7787 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7789 && REG_P (original_target)
7790 && (GET_MODE (original_target)
7791 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7793 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7796 /* If temp is constant, we can just compute the result. */
7797 if (GET_CODE (temp) == CONST_INT)
7799 if (INTVAL (temp) != 0)
7800 emit_move_insn (target, const1_rtx);
7802 emit_move_insn (target, const0_rtx);
7807 if (temp != original_target)
7809 enum machine_mode mode1 = GET_MODE (temp);
7810 if (mode1 == VOIDmode)
7811 mode1 = tmode != VOIDmode ? tmode : mode;
7813 temp = copy_to_mode_reg (mode1, temp);
7816 op1 = gen_label_rtx ();
7817 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7818 GET_MODE (temp), unsignedp, op1);
7819 emit_move_insn (temp, const1_rtx);
7824 /* If no set-flag instruction, must generate a conditional store
7825 into a temporary variable. Drop through and handle this like && and ||. */
7830 if (! ignore && (target == 0 || modifier == EXPAND_STACK_PARM
7831 || ! safe_from_p (target, exp, 1)
7832 /* Make sure we don't have a hard reg (such as function's return
7833 value) live across basic blocks, if not optimizing. */
7834 || (!optimize && REG_P (target)
7835 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7836 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7839 emit_move_insn (target, const0_rtx);
7841 op1 = gen_label_rtx ();
7842 jumpifnot (exp, op1);
7845 emit_move_insn (target, const1_rtx);
7848 return ignore ? const0_rtx : target;
7850 case TRUTH_NOT_EXPR:
7851 if (modifier == EXPAND_STACK_PARM) target = 0;
7853 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7854 /* The parser is careful to generate TRUTH_NOT_EXPR
7855 only with operands that are always zero or one. */
7856 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7857 target, 1, OPTAB_LIB_WIDEN);
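      /* Illustrative sketch: since the operand is known to be 0 or 1,
	 !x is computed here as x ^ 1, e.g.
	     (set (reg:SI 61) (xor:SI (reg:SI 60) (const_int 1)))
	 which avoids a compare-and-branch sequence.  */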
7861 case STATEMENT_LIST:
7863 tree_stmt_iterator iter;
7865 gcc_assert (ignore);
7867 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7868 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7873 /* A COND_EXPR with its type being VOID_TYPE represents a
7874 conditional jump and is handled in
7875 expand_gimple_cond_expr. */
7876 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
7878 /* Note that COND_EXPRs whose type is a structure or union
7879 are required to be constructed to contain assignments of
7880 a temporary variable, so that we can evaluate them here
7881 for side effect only. If type is void, we must do likewise. */
7883 gcc_assert (!TREE_ADDRESSABLE (type)
7885 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7886 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7888 /* If we are not to produce a result, we have no target. Otherwise,
7889 if a target was specified use it; it will not be used as an
7890 intermediate target unless it is safe. If no target, use a temporary. */
7893 if (modifier != EXPAND_STACK_PARM
7895 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7896 && GET_MODE (original_target) == mode
7897 #ifdef HAVE_conditional_move
7898 && (! can_conditionally_move_p (mode)
7899 || REG_P (original_target))
7901 && !MEM_P (original_target))
7902 temp = original_target;
7904 temp = assign_temp (type, 0, 0, 1);
7906 do_pending_stack_adjust ();
7908 op0 = gen_label_rtx ();
7909 op1 = gen_label_rtx ();
7910 jumpifnot (TREE_OPERAND (exp, 0), op0);
7911 store_expr (TREE_OPERAND (exp, 1), temp,
7912 modifier == EXPAND_STACK_PARM);
7914 emit_jump_insn (gen_jump (op1));
7917 store_expr (TREE_OPERAND (exp, 2), temp,
7918 modifier == EXPAND_STACK_PARM);
7925 target = expand_vec_cond_expr (exp, target);
7930 tree lhs = TREE_OPERAND (exp, 0);
7931 tree rhs = TREE_OPERAND (exp, 1);
7933 gcc_assert (ignore);
7935 /* Check for |= or &= of a bitfield of size one into another bitfield
7936 of size 1. In this case, (unless we need the result of the
7937 assignment) we can do this more efficiently with a
7938 test followed by an assignment, if necessary.
7940 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7941 things change so we do, this code should be enhanced to support it. */
7943 if (TREE_CODE (lhs) == COMPONENT_REF
7944 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7945 || TREE_CODE (rhs) == BIT_AND_EXPR)
7946 && TREE_OPERAND (rhs, 0) == lhs
7947 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7948 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7949 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7951 rtx label = gen_label_rtx ();
7953 do_jump (TREE_OPERAND (rhs, 1),
7954 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7955 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7956 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7957 (TREE_CODE (rhs) == BIT_IOR_EXPR
7959 : integer_zero_node)));
7960 do_pending_stack_adjust ();
7965 expand_assignment (lhs, rhs);
7971 if (!TREE_OPERAND (exp, 0))
7972 expand_null_return ();
7974 expand_return (TREE_OPERAND (exp, 0));
7978 return expand_expr_addr_expr (exp, target, tmode, modifier);
7981 /* Get the rtx code of the operands. */
7982 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7983 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7986 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7988 /* Move the real (op0) and imaginary (op1) parts to their location. */
7989 write_complex_part (target, op0, false);
7990 write_complex_part (target, op1, true);
7995 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7996 return read_complex_part (op0, false);
7999 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8000 return read_complex_part (op0, true);
8003 expand_resx_expr (exp);
8006 case TRY_CATCH_EXPR:
8008 case EH_FILTER_EXPR:
8009 case TRY_FINALLY_EXPR:
8010 /* Lowered by tree-eh.c. */
8013 case WITH_CLEANUP_EXPR:
8014 case CLEANUP_POINT_EXPR:
8016 case CASE_LABEL_EXPR:
8022 case PREINCREMENT_EXPR:
8023 case PREDECREMENT_EXPR:
8024 case POSTINCREMENT_EXPR:
8025 case POSTDECREMENT_EXPR:
8028 case TRUTH_ANDIF_EXPR:
8029 case TRUTH_ORIF_EXPR:
8030 /* Lowered by gimplify.c. */
8034 return get_exception_pointer (cfun);
8037 return get_exception_filter (cfun);
8040 /* Function descriptors are not valid except as
8041 initialization constants, and should not be expanded. */
8049 expand_label (TREE_OPERAND (exp, 0));
8053 expand_asm_expr (exp);
8056 case WITH_SIZE_EXPR:
8057 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8058 have pulled out the size to use in whatever context it needed. */
8059 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8062 case REALIGN_LOAD_EXPR:
8064 tree oprnd0 = TREE_OPERAND (exp, 0);
8065 tree oprnd1 = TREE_OPERAND (exp, 1);
8066 tree oprnd2 = TREE_OPERAND (exp, 2);
8069 this_optab = optab_for_tree_code (code, type);
8070 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8071 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8072 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8081 return lang_hooks.expand_expr (exp, original_target, tmode,
8085 /* Here to do an ordinary binary operator. */
8087 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8088 subtarget, &op0, &op1, 0);
8090 this_optab = optab_for_tree_code (code, type);
8092 if (modifier == EXPAND_STACK_PARM) target = 0;
8094 temp = expand_binop (mode, this_optab, op0, op1, target,
8095 unsignedp, OPTAB_LIB_WIDEN);
8097 return REDUCE_BIT_FIELD (temp);
8099 #undef REDUCE_BIT_FIELD
8101 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8102 signedness of TYPE), possibly returning the result in TARGET. */
8104 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8106 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8107 if (target && GET_MODE (target) != GET_MODE (exp))
8109 if (TYPE_UNSIGNED (type))
8112 if (prec < HOST_BITS_PER_WIDE_INT)
8113 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8116 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8117 ((unsigned HOST_WIDE_INT) 1
8118 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8120 return expand_and (GET_MODE (exp), exp, mask, target);
8124 tree count = build_int_cst (NULL_TREE,
8125 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8126 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8127 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
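/* Illustrative sketch: for a 5-bit unsigned bit-field type stored in
   SImode, PREC is 5 and the mask built above is (const_int 31).  For
   the signed case, the value is shifted left by 32 - 5 = 27 bits and
   then arithmetically right by 27 bits, copying bit 4 into all of the
   higher bits.  */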
8131 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8132 when applied to the address of EXP produces an address known to be
8133 aligned more than BIGGEST_ALIGNMENT. */
8136 is_aligning_offset (tree offset, tree exp)
8138 /* Strip off any conversions. */
8139 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8140 || TREE_CODE (offset) == NOP_EXPR
8141 || TREE_CODE (offset) == CONVERT_EXPR)
8142 offset = TREE_OPERAND (offset, 0);
8144 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8145 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8146 if (TREE_CODE (offset) != BIT_AND_EXPR
8147 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8148 || compare_tree_int (TREE_OPERAND (offset, 1),
8149 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8150 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8153 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8154 It must be NEGATE_EXPR. Then strip any more conversions. */
8155 offset = TREE_OPERAND (offset, 0);
8156 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8157 || TREE_CODE (offset) == NOP_EXPR
8158 || TREE_CODE (offset) == CONVERT_EXPR)
8159 offset = TREE_OPERAND (offset, 0);
8161 if (TREE_CODE (offset) != NEGATE_EXPR)
8164 offset = TREE_OPERAND (offset, 0);
8165 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8166 || TREE_CODE (offset) == NOP_EXPR
8167 || TREE_CODE (offset) == CONVERT_EXPR)
8168 offset = TREE_OPERAND (offset, 0);
8170 /* This must now be the address of EXP. */
8171 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
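/* Illustrative sketch: the shape accepted above corresponds to an
   aligning offset written as

       offset = (-(intptr_t) &exp) & C

   where C + 1 is a power of two exceeding BIGGEST_ALIGNMENT in bytes;
   adding such an offset rounds the address of EXP up to the next
   C + 1 byte boundary.  */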
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
        {
          *ptr_offset = size_zero_node;
          return TREE_OPERAND (arg, 0);
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
        {
          array = TREE_OPERAND (arg, 0);
          offset = size_zero_node;
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
            return 0;
        }
      else
        return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg0, 0);
          offset = arg1;
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg1, 0);
          offset = arg0;
        }
      else
        return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
          || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
        return 0;

      /* Only handle them if they are read-only, non-volatile and
         bind locally.  */
      if (! TREE_READONLY (array)
          || TREE_SIDE_EFFECTS (array)
          || ! targetm.binds_local_p (array))
        return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
          || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
        return 0;

      /* If the variable is bigger than the string literal, OFFSET must
         be constant and inside of the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
          && (! host_integerp (offset, 1)
              || compare_tree_int (offset, length) >= 0))
        return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
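/* For example, given the argument "hello" + 2 (a PLUS_EXPR adding the
   constant 2 to an ADDR_EXPR of a STRING_CST), string_constant returns
   the STRING_CST "hello" and sets *PTR_OFFSET to (sizetype) 2.  */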
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
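/* For example, if the target has no scc instruction for "OP0 < OP1",
   the fallback emitted below is roughly

        (set TARGET (const_int 1))
        (jump_insn ... if OP0 < OP1 then goto LABEL)
        (set TARGET (const_int 0))
      LABEL:

   with the two constants exchanged when the result must be inverted.  */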
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;

    case NE_EXPR:
      code = NE;
      break;

    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;

    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;

    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;

    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;

    case ORDERED_EXPR:
      code = ORDERED;
      break;

    case UNLT_EXPR:
      code = UNLT;
      break;

    case UNLE_EXPR:
      code = UNLE;
      break;

    case UNGT_EXPR:
      code = UNGT;
      break;

    case UNGE_EXPR:
      code = UNGE;
      break;

    case UNEQ_EXPR:
      code = UNEQ;
      break;

    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
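  /* For example, (X & 8) != 0 becomes, roughly, (X >> 3) & 1, and
     (X & 8) == 0 becomes ((X >> 3) & 1) ^ 1.  */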
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
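/* E.g. with a casesi pattern the default requires at least four case
   labels before a dispatch table is used; without one, at least five.  */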
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (lang_hooks.types.type_for_size
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
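/* For example, for a switch whose case labels span [3, 7], MINVAL is 3
   and RANGE is 4; the casesi pattern then dispatches through TABLE_LABEL
   for adjusted index values 0 through 4 and branches to DEFAULT_LABEL
   for everything else.  */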
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);
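  /* E.g. for cases spanning [10, 20], RANGE is 10; an original value
     below 10 wraps to a huge unsigned number after the subtraction, so
     the single unsigned "index > 10" test catches values on both sides
     of the range.  */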
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);
  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
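  /* The address built above has the shape
       (plus:P (mult:P index (const_int entry_size)) (label_ref table_label)),
     i.e. the table base plus the scaled index.  */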
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
                             convert (index_type, index_expr),
                             convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
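/* E.g. on a target without vector hardware, V2DI is still valid as long
   as DImode itself is supported; each V2DI operation can then be
   synthesized from a pair of DImode operations.  */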
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
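/* For example, a V4SI VECTOR_CST listing the elements {1, 2, 3} becomes
   (const_vector:V4SI [1 2 3 0]), the missing trailing element being
   filled with zero by the loop below.  */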
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"