/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
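/* A hedged illustration (not part of the original source): on a target
   where STACK_GROWS_DOWNWARD is defined but ARGS_GROW_DOWNWARD is not,
   the two defined() tests differ, PUSH_ARGS_REVERSED is defined, and a
   call such as f (a, b, c) pushes c first and a last, so that a ends
   up at the lowest address.  */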
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
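/* Illustrative reading, with assumed (not original) example values: if
   MOVE_MAX_PIECES is 8 and MOVE_RATIO is 3, a 16-byte copy with 64-bit
   alignment needs two DImode moves, so MOVE_BY_PIECES_P (16, 64) is
   true and the copy is expanded inline instead of via a library call.  */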
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
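/* A hypothetical usage sketch (illustration only, not from the
   original source): widening a SImode pseudo into a DImode pseudo,
   assuming DImode is wider than SImode on the target:

     rtx to = gen_reg_rtx (DImode);
     rtx from = gen_reg_rtx (SImode);
     convert_move (to, from, 1);

   Passing UNSIGNEDP == 1 zero-extends FROM; passing 0 would
   sign-extend instead.  */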
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
                  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
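/* A hypothetical usage sketch (illustration only): truncating a DImode
   constant to SImode without emitting a move, assuming both modes
   exist on the target:

     rtx wide = GEN_INT (0x12345678);
     rtx narrow = convert_modes (SImode, DImode, wide, 1);

   For a CONST_INT the result is computed directly; otherwise a pseudo
   is created and a convert_move into it is emitted.  */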
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
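/* Worked example (assumed values, for illustration only): on a host
   with an 8-byte HOST_WIDE_INT and a target with MOVE_MAX_PIECES of 8,
   STORE_MAX_PIECES is MIN (8, 16) == 8, so stores are built from
   pieces of at most 8 bytes.  */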
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
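/* A hypothetical usage sketch (illustration only): copying 16 bytes
   between two BLKmode MEMs and getting the past-the-end address back,
   as a mempcpy-style expansion would want:

     rtx end = move_by_pieces (dst_mem, src_mem, 16, align, 1);

   With ENDP == 0 the return value is simply TO; with ENDP == 2 the
   returned address points at the last byte written, ala stpcpy.  */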
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }

  return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
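/* Worked example (assumed values, illustration only): for l == 7 and
   a 32-bit ALIGN, with max_size == 9 and assuming DImode requires
   64-bit alignment, the DImode step is skipped for alignment, then one
   SImode, one HImode and one QImode move are counted, giving 3.  */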
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
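/* A hypothetical usage sketch (illustration only): copying a 32-byte
   aggregate where a libcall is acceptable:

     rtx ret = emit_block_move (dst_mem, src_mem, GEN_INT (32),
                                BLOCK_OP_NORMAL);

   Depending on size, alignment and the target's movmem patterns, this
   expands to inline moves, a movmem insn, or a call to memcpy (whose
   return value, if any, comes back in RET).  */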
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
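/* For orientation (illustrative shape, not from the original source),
   such a group PARALLEL looks like

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   i.e. each element pairs a register with its byte offset within the
   value; gen_group_rtx rebuilds the same shape over fresh pseudos.  */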
rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }
  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }
      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }
  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }
  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */
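/* A hypothetical usage sketch (illustration only): unpacking a small
   struct returned in registers into a fresh stack temporary, where
   hard_return_reg is an invented name for the return-value register:

     rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   Passing NULL_RTX for TGTBLK makes the function allocate the
   temporary itself; the filled block is returned.  */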
rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? ! BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     BLKmode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
2059 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2060 PARALLEL REGS. This is for calls that pass values in multiple
2061 non-contiguous locations. The Irix 6 ABI has examples of this. */
2064 use_group_regs (rtx *call_fusage, rtx regs)
2068 for (i = 0; i < XVECLEN (regs, 0); i++)
2070 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2072 /* A NULL entry means the parameter goes both on the stack and in
2073 registers. This can also be a MEM for targets that pass values
2074 partially on the stack and partially in registers. */
2075 if (reg != 0 && REG_P (reg))
2076 use_reg (call_fusage, reg);
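/* A minimal sketch (not GCC code) of how the helpers above are used when
   expanding a call; the register numbers are hypothetical.  */
#if 0
static void
example_build_call_fusage (void)
{
  rtx call_fusage = NULL_RTX;

  /* Record that hard registers 0 and 1 carry argument values.  The
     result is a chain of (expr_list (use (reg ...)) ...) nodes that the
     expander attaches to the CALL_INSN's usage list.  */
  use_regs (&call_fusage, 0, 2);
}
#endif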
2081 /* Determine whether the LEN bytes generated by CONSTFUN can be
2082 stored to memory using several move instructions. CONSTFUNDATA is
2083 a pointer which will be passed as argument in every CONSTFUN call.
2084 ALIGN is maximum alignment we can assume. Return nonzero if a
2085 call to store_by_pieces should succeed. */
2088 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2089 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2090 void *constfundata, unsigned int align)
2092 unsigned HOST_WIDE_INT l;
2093 unsigned int max_size;
2094 HOST_WIDE_INT offset = 0;
2095 enum machine_mode mode, tmode;
2096 enum insn_code icode;
2103 if (! STORE_BY_PIECES_P (len, align))
2106 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2107 if (align >= GET_MODE_ALIGNMENT (tmode))
2108 align = GET_MODE_ALIGNMENT (tmode);
2111 enum machine_mode xmode;
2113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2115 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2116 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2117 || SLOW_UNALIGNED_ACCESS (tmode, align))
2120 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2123 /* We would first store what we can in the largest integer mode, then go to
2124 successively smaller modes. */
2127 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2132 max_size = STORE_MAX_PIECES + 1;
2133 while (max_size > 1)
2135 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2136 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2137 if (GET_MODE_SIZE (tmode) < max_size)
2140 if (mode == VOIDmode)
2143 icode = mov_optab->handlers[(int) mode].insn_code;
2144 if (icode != CODE_FOR_nothing
2145 && align >= GET_MODE_ALIGNMENT (mode))
2147 unsigned int size = GET_MODE_SIZE (mode);
2154 cst = (*constfun) (constfundata, offset, mode);
2155 if (!LEGITIMATE_CONSTANT_P (cst))
2165 max_size = GET_MODE_SIZE (mode);
2168 /* The code above should have handled everything. */
2175 /* Generate several move instructions to store LEN bytes generated by
2176 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2177 pointer which will be passed as argument in every CONSTFUN call.
2178 ALIGN is maximum alignment we can assume.
2179 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2180 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2184 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2185 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2186 void *constfundata, unsigned int align, int endp)
2188 struct store_by_pieces data;
2192 gcc_assert (endp != 2);
2196 gcc_assert (STORE_BY_PIECES_P (len, align));
2197 data.constfun = constfun;
2198 data.constfundata = constfundata;
2201 store_by_pieces_1 (&data, align);
2206 gcc_assert (!data.reverse);
2211 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2212 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2214 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2217 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2224 to1 = adjust_address (data.to, QImode, data.offset);
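/* A minimal sketch of a CONSTFUN for can_store_by_pieces and
   store_by_pieces: it replicates one fill byte across MODE, the shape a
   memset-style expansion needs.  The names are hypothetical; this is
   not GCC code.  */
#if 0
static rtx
example_constfun (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		  enum machine_mode mode)
{
  unsigned char byte = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  /* Build MODE's worth of copies of BYTE; this sketch assumes MODE is
     no wider than a HOST_WIDE_INT.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | byte;
  return gen_int_mode (val, mode);
}

/* Typical call pattern, guarded by the feasibility check:

     if (can_store_by_pieces (len, example_constfun, &byte, align))
       store_by_pieces (to, len, example_constfun, &byte, align, 0);  */
#endif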
2232 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2233 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2236 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2238 struct store_by_pieces data;
2243 data.constfun = clear_by_pieces_1;
2244 data.constfundata = NULL;
2247 store_by_pieces_1 (&data, align);
2250 /* Callback routine for clear_by_pieces.
2251 Return const0_rtx unconditionally. */
2254 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2255 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2256 enum machine_mode mode ATTRIBUTE_UNUSED)
2261 /* Subroutine of clear_by_pieces and store_by_pieces.
2262 Generate several move instructions to store LEN bytes of block TO. (A MEM
2263 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2266 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2267 unsigned int align ATTRIBUTE_UNUSED)
2269 rtx to_addr = XEXP (data->to, 0);
2270 unsigned int max_size = STORE_MAX_PIECES + 1;
2271 enum machine_mode mode = VOIDmode, tmode;
2272 enum insn_code icode;
2275 data->to_addr = to_addr;
2277 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2278 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2280 data->explicit_inc_to = 0;
2282 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2284 data->offset = data->len;
2286 /* If storing requires more than two move insns,
2287 copy addresses to registers (to make displacements shorter)
2288 and use post-increment if available. */
2289 if (!data->autinc_to
2290 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2292 /* Determine the main mode we'll be using. */
2293 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2294 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2295 if (GET_MODE_SIZE (tmode) < max_size)
2298 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2300 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2301 data->autinc_to = 1;
2302 data->explicit_inc_to = -1;
2305 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2306 && ! data->autinc_to)
2308 data->to_addr = copy_addr_to_reg (to_addr);
2309 data->autinc_to = 1;
2310 data->explicit_inc_to = 1;
2313 if (!data->autinc_to && CONSTANT_P (to_addr))
2314 data->to_addr = copy_addr_to_reg (to_addr);
2317 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2318 if (align >= GET_MODE_ALIGNMENT (tmode))
2319 align = GET_MODE_ALIGNMENT (tmode);
2322 enum machine_mode xmode;
2324 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2326 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2327 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2328 || SLOW_UNALIGNED_ACCESS (tmode, align))
2331 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2334 /* First store what we can in the largest integer mode, then go to
2335 successively smaller modes. */
2337 while (max_size > 1)
2339 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2340 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2341 if (GET_MODE_SIZE (tmode) < max_size)
2344 if (mode == VOIDmode)
2347 icode = mov_optab->handlers[(int) mode].insn_code;
2348 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2349 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2351 max_size = GET_MODE_SIZE (mode);
2354 /* The code above should have handled everything. */
2355 gcc_assert (!data->len);
2358 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2359 with move instructions for mode MODE. GENFUN is the gen_... function
2360 to make a move insn for that mode. DATA has all the other info. */
2363 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2364 struct store_by_pieces *data)
2366 unsigned int size = GET_MODE_SIZE (mode);
2369 while (data->len >= size)
2372 data->offset -= size;
2374 if (data->autinc_to)
2375 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2378 to1 = adjust_address (data->to, mode, data->offset);
2380 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2381 emit_insn (gen_add2_insn (data->to_addr,
2382 GEN_INT (-(HOST_WIDE_INT) size)));
2384 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2385 emit_insn ((*genfun) (to1, cst));
2387 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2388 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2390 if (! data->reverse)
2391 data->offset += size;
2397 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2398 its length in bytes. */
2401 clear_storage (rtx object, rtx size)
2404 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2405 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2407 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2408 just move a zero. Otherwise, do this a piece at a time. */
2409 if (GET_MODE (object) != BLKmode
2410 && GET_CODE (size) == CONST_INT
2411 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2412 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2415 if (size == const0_rtx)
2417 else if (GET_CODE (size) == CONST_INT
2418 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2419 clear_by_pieces (object, INTVAL (size), align);
2420 else if (clear_storage_via_clrmem (object, size, align))
2423 retval = clear_storage_via_libcall (object, size);
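/* Illustrative only (not GCC code): clearing a 64-byte BLKmode stack
   temporary goes through the dispatch above; for a constant size this
   small it is normally done by pieces rather than by libcall.  */
#if 0
static void
example_clear_temp (void)
{
  rtx mem = assign_stack_temp (BLKmode, 64, 0);
  clear_storage (mem, GEN_INT (64));
}
#endif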
2429 /* A subroutine of clear_storage. Expand a clrmem pattern;
2430 return true if successful. */
2433 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2435 /* Try the most limited insn first, because there's no point
2436 including more than one in the machine description unless
2437 the more limited one has some advantage. */
2439 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2440 enum machine_mode mode;
2442 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2443 mode = GET_MODE_WIDER_MODE (mode))
2445 enum insn_code code = clrmem_optab[(int) mode];
2446 insn_operand_predicate_fn pred;
2448 if (code != CODE_FOR_nothing
2449 /* We don't need MODE to be narrower than
2450 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2451 the mode mask, as it is returned by the macro, it will
2452 definitely be less than the actual mode mask. */
2453 && ((GET_CODE (size) == CONST_INT
2454 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2455 <= (GET_MODE_MASK (mode) >> 1)))
2456 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2457 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2458 || (*pred) (object, BLKmode))
2459 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2460 || (*pred) (opalign, VOIDmode)))
2463 rtx last = get_last_insn ();
2466 op1 = convert_to_mode (mode, size, 1);
2467 pred = insn_data[(int) code].operand[1].predicate;
2468 if (pred != 0 && ! (*pred) (op1, mode))
2469 op1 = copy_to_mode_reg (mode, op1);
2471 pat = GEN_FCN ((int) code) (object, op1, opalign);
2478 delete_insns_since (last);
2485 /* A subroutine of clear_storage. Expand a call to memset.
2486 Return the return value of memset, 0 otherwise. */
2489 clear_storage_via_libcall (rtx object, rtx size)
2491 tree call_expr, arg_list, fn, object_tree, size_tree;
2492 enum machine_mode size_mode;
2495 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2496 place those pseudos into a VAR_DECL and use them later. */
2498 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2500 size_mode = TYPE_MODE (sizetype);
2501 size = convert_to_mode (size_mode, size, 1);
2502 size = copy_to_mode_reg (size_mode, size);
2504 /* It is incorrect to use the libcall calling conventions to call
2505 memset in this context. This could be a user call to memset and
2506 the user may wish to examine the return value from memset. For
2507 targets where libcalls and normal calls have different conventions
2508 for returning pointers, we could end up generating incorrect code. */
2510 object_tree = make_tree (ptr_type_node, object);
2511 size_tree = make_tree (sizetype, size);
2513 fn = clear_storage_libcall_fn (true);
2514 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2515 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2516 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2518 /* Now we have to build up the CALL_EXPR itself. */
2519 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2520 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2521 call_expr, arg_list, NULL_TREE);
2523 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2528 /* A subroutine of clear_storage_via_libcall. Create the tree node
2529 for the function we use for block clears. The first time FOR_CALL
2530 is true, we call assemble_external. */
2532 static GTY(()) tree block_clear_fn;
2535 init_block_clear_fn (const char *asmspec)
2537 if (!block_clear_fn)
2541 fn = get_identifier ("memset");
2542 args = build_function_type_list (ptr_type_node, ptr_type_node,
2543 integer_type_node, sizetype,
2546 fn = build_decl (FUNCTION_DECL, fn, args);
2547 DECL_EXTERNAL (fn) = 1;
2548 TREE_PUBLIC (fn) = 1;
2549 DECL_ARTIFICIAL (fn) = 1;
2550 TREE_NOTHROW (fn) = 1;
2552 block_clear_fn = fn;
2556 set_user_assembler_name (block_clear_fn, asmspec);
2560 clear_storage_libcall_fn (int for_call)
2562 static bool emitted_extern;
2564 if (!block_clear_fn)
2565 init_block_clear_fn (NULL);
2567 if (for_call && !emitted_extern)
2569 emitted_extern = true;
2570 make_decl_rtl (block_clear_fn);
2571 assemble_external (block_clear_fn);
2574 return block_clear_fn;
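/* Sketch (not GCC code): a port that renames the block-clear routine
   would register the assembler name once at initialization; the name
   below is made up.  */
#if 0
static void
example_rename_block_clear (void)
{
  init_block_clear_fn ("__example_memset");
}
#endif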
2577 /* Write to one of the components of the complex value CPLX. Write VAL to
2578 the real part if IMAG_P is false, and the imaginary part if it's true. */
2581 write_complex_part (rtx cplx, rtx val, bool imag_p)
2583 enum machine_mode cmode;
2584 enum machine_mode imode;
2587 if (GET_CODE (cplx) == CONCAT)
2589 emit_move_insn (XEXP (cplx, imag_p), val);
2593 cmode = GET_MODE (cplx);
2594 imode = GET_MODE_INNER (cmode);
2595 ibitsize = GET_MODE_BITSIZE (imode);
2597 /* If the sub-object is at least word sized, then we know that subregging
2598 will work. This special case is important, since store_bit_field
2599 wants to operate on integer modes, and there's rarely an OImode to
2600 correspond to TCmode. */
2601 if (ibitsize >= BITS_PER_WORD)
2603 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2604 imag_p ? GET_MODE_SIZE (imode) : 0);
2605 emit_move_insn (part, val);
2608 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2611 /* Extract one of the components of the complex value CPLX. Extract the
2612 real part if IMAG_P is false, and the imaginary part if it's true. */
2615 read_complex_part (rtx cplx, bool imag_p)
2617 enum machine_mode cmode, imode;
2620 if (GET_CODE (cplx) == CONCAT)
2621 return XEXP (cplx, imag_p);
2623 cmode = GET_MODE (cplx);
2624 imode = GET_MODE_INNER (cmode);
2625 ibitsize = GET_MODE_BITSIZE (imode);
2627 /* Special case reads from complex constants that got spilled to memory. */
2628 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2630 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2631 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2633 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2634 if (CONSTANT_CLASS_P (part))
2635 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2639 /* If the sub-object is at least word sized, then we know that subregging
2640 will work. This special case is important, since extract_bit_field
2641 wants to operate on integer modes, and there's rarely an OImode to
2642 correspond to TCmode. */
2643 if (ibitsize >= BITS_PER_WORD)
2645 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2646 imag_p ? GET_MODE_SIZE (imode) : 0);
2647 gcc_assert (ret != NULL);
2651 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2652 true, NULL_RTX, imode, imode);
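/* A small sketch (not GCC code) pairing the two helpers above.  With
   generating_concat_p set, gen_reg_rtx yields a CONCAT of two pseudos
   for complex modes, so both helpers take the cheap CONCAT path.  */
#if 0
static void
example_complex_parts (void)
{
  rtx c = gen_reg_rtx (DCmode);			/* complex double */
  rtx re = read_complex_part (c, false);	/* real part */
  write_complex_part (c, re, true);		/* imag := real */
}
#endif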
2655 /* A subroutine of emit_move_via_alt_mode. Yet another lowpart generator.
2656 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2657 represented in NEW_MODE. */
2660 emit_move_change_mode (enum machine_mode new_mode,
2661 enum machine_mode old_mode, rtx x)
2665 if (reload_in_progress && MEM_P (x))
2667 /* We can't use gen_lowpart here because it may call change_address
2668 which is not appropriate if we were called when a reload was in
2669 progress. We don't have to worry about changing the address since
2670 the size in bytes is supposed to be the same. Copy the MEM to
2671 change the mode and move any substitutions from the old MEM to the new one. */
2674 ret = adjust_address_nv (x, new_mode, 0);
2675 copy_replacements (x, ret);
2679 /* Note that we do want simplify_subreg's behaviour of validating
2680 that the new mode is ok for a hard register. If we were to use
2681 simplify_gen_subreg, we would create the subreg, but would
2682 probably run into the target not being able to implement it. */
2683 ret = simplify_subreg (new_mode, x, old_mode, 0);
2689 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2690 ALT_MODE instead of the operand's natural mode, MODE. CODE is the insn
2691 code for the move in ALT_MODE, and is known to be valid. Returns the
2692 instruction emitted, or NULL if X or Y cannot be represented in ALT_MODE. */
2695 emit_move_via_alt_mode (enum machine_mode alt_mode, enum machine_mode mode,
2696 enum insn_code code, rtx x, rtx y)
2698 x = emit_move_change_mode (alt_mode, mode, x);
2701 y = emit_move_change_mode (alt_mode, mode, y);
2704 return emit_insn (GEN_FCN (code) (x, y));
2707 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2708 an integer mode of the same size as MODE. Returns the instruction
2709 emitted, or NULL if such a move could not be generated. */
2712 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2714 enum machine_mode imode;
2715 enum insn_code code;
2717 /* There must exist a mode of the exact size we require. */
2718 imode = int_mode_for_mode (mode);
2719 if (imode == BLKmode)
2722 /* The target must support moves in this mode. */
2723 code = mov_optab->handlers[imode].insn_code;
2724 if (code == CODE_FOR_nothing)
2727 return emit_move_via_alt_mode (imode, mode, code, x, y);
2730 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2731 Return an equivalent MEM that does not use an auto-increment. */
2734 emit_move_resolve_push (enum machine_mode mode, rtx x)
2736 enum rtx_code code = GET_CODE (XEXP (x, 0));
2737 HOST_WIDE_INT adjust;
2740 adjust = GET_MODE_SIZE (mode);
2741 #ifdef PUSH_ROUNDING
2742 adjust = PUSH_ROUNDING (adjust);
2744 if (code == PRE_DEC || code == POST_DEC)
2747 /* Do not use anti_adjust_stack, since we don't want to update
2748 stack_pointer_delta. */
2749 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2750 GEN_INT (adjust), stack_pointer_rtx,
2751 0, OPTAB_LIB_WIDEN);
2752 if (temp != stack_pointer_rtx)
2753 emit_move_insn (stack_pointer_rtx, temp);
2759 temp = stack_pointer_rtx;
2762 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2765 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2771 return replace_equiv_address (x, temp);
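/* A worked example of the code above (illustrative): pushing an SImode
   value through (pre_dec (reg sp)) on a target that pushes 4 bytes at a
   time gives ADJUST == 4, negated for the decrement; the stack pointer
   is then moved down by 4 explicitly, and the returned MEM addresses
   the stack pointer itself, i.e. the freshly allocated slot.  */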
2774 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2775 X is known to satisfy push_operand, and MODE is known to be complex.
2776 Returns the last instruction emitted. */
2779 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2781 enum machine_mode submode = GET_MODE_INNER (mode);
2784 #ifdef PUSH_ROUNDING
2785 unsigned int submodesize = GET_MODE_SIZE (submode);
2787 /* In case we output to the stack, but the size is smaller than the
2788 machine can push exactly, we need to use move instructions. */
2789 if (PUSH_ROUNDING (submodesize) != submodesize)
2791 x = emit_move_resolve_push (mode, x);
2792 return emit_move_insn (x, y);
2796 /* Note that the real part always precedes the imag part in memory
2797 regardless of machine's endianness. */
2798 switch (GET_CODE (XEXP (x, 0)))
2812 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2813 read_complex_part (y, imag_first));
2814 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2815 read_complex_part (y, !imag_first));
2818 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2819 MODE is known to be complex. Returns the last instruction emitted. */
2822 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2826 /* Need to take special care for pushes, to maintain proper ordering
2827 of the data, and possibly extra padding. */
2828 if (push_operand (x, mode))
2829 return emit_move_complex_push (mode, x, y);
2831 /* For memory to memory moves, optimal behaviour can be had with the
2832 existing block move logic. */
2833 if (MEM_P (x) && MEM_P (y))
2835 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2836 BLOCK_OP_NO_LIBCALL);
2837 return get_last_insn ();
2840 /* See if we can coerce the target into moving both values at once. */
2842 /* Not possible if the values are inherently not adjacent. */
2843 if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2845 /* Is possible if both are registers (or subregs of registers). */
2846 else if (register_operand (x, mode) && register_operand (y, mode))
2848 /* If one of the operands is a memory, and alignment constraints
2849 are friendly enough, we may be able to do combined memory operations.
2850 We do not attempt this if Y is a constant because that combination is
2851 usually better with the by-parts thing below. */
2852 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2853 && (!STRICT_ALIGNMENT
2854 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2861 rtx ret = emit_move_via_integer (mode, x, y);
2866 /* Show the output dies here. This is necessary for SUBREGs
2867 of pseudos since we cannot track their lifetimes correctly;
2868 hard regs shouldn't appear here except as return values. */
2869 if (!reload_completed && !reload_in_progress
2870 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2871 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2873 write_complex_part (x, read_complex_part (y, false), false);
2874 write_complex_part (x, read_complex_part (y, true), true);
2875 return get_last_insn ();
2878 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2879 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2882 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2886 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
2889 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
2890 if (code != CODE_FOR_nothing)
2891 return emit_move_via_alt_mode (CCmode, mode, code, x, y);
2894 /* Otherwise, find the MODE_INT mode of the same width. */
2895 ret = emit_move_via_integer (mode, x, y);
2896 gcc_assert (ret != NULL);
2900 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2901 MODE is any multi-word or full-word mode that lacks a move_insn
2902 pattern. Note that you will get better code if you define such
2903 patterns, even if they must turn into multiple assembler instructions. */
2906 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
2913 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2915 /* If X is a push on the stack, do the push now and replace
2916 X with a reference to the stack pointer. */
2917 if (push_operand (x, mode))
2918 x = emit_move_resolve_push (mode, x);
2920 /* If we are in reload, see if either operand is a MEM whose address
2921 is scheduled for replacement. */
2922 if (reload_in_progress && MEM_P (x)
2923 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2924 x = replace_equiv_address_nv (x, inner);
2925 if (reload_in_progress && MEM_P (y)
2926 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2927 y = replace_equiv_address_nv (y, inner);
2931 need_clobber = false;
2933 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2936 rtx xpart = operand_subword (x, i, 1, mode);
2937 rtx ypart = operand_subword (y, i, 1, mode);
2939 /* If we can't get a part of Y, put Y into memory if it is a
2940 constant. Otherwise, force it into a register. If we still
2941 can't get a part of Y, abort. */
2942 if (ypart == 0 && CONSTANT_P (y))
2944 y = force_const_mem (mode, y);
2945 ypart = operand_subword (y, i, 1, mode);
2947 else if (ypart == 0)
2948 ypart = operand_subword_force (y, i, mode);
2950 gcc_assert (xpart && ypart);
2952 need_clobber |= (GET_CODE (xpart) == SUBREG);
2954 last_insn = emit_move_insn (xpart, ypart);
2960 /* Show the output dies here. This is necessary for SUBREGs
2961 of pseudos since we cannot track their lifetimes correctly;
2962 hard regs shouldn't appear here except as return values.
2963 We never want to emit such a clobber after reload. */
2965 && ! (reload_in_progress || reload_completed)
2966 && need_clobber != 0)
2967 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2974 /* Low level part of emit_move_insn.
2975 Called just like emit_move_insn, but assumes X and Y
2976 are basically valid. */
2979 emit_move_insn_1 (rtx x, rtx y)
2981 enum machine_mode mode = GET_MODE (x);
2982 enum insn_code code;
2984 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2986 code = mov_optab->handlers[mode].insn_code;
2987 if (code != CODE_FOR_nothing)
2988 return emit_insn (GEN_FCN (code) (x, y));
2990 /* Expand complex moves by moving real part and imag part. */
2991 if (COMPLEX_MODE_P (mode))
2992 return emit_move_complex (mode, x, y);
2994 if (GET_MODE_CLASS (mode) == MODE_CC)
2995 return emit_move_ccmode (mode, x, y);
2997 /* Try using a move pattern for the corresponding integer mode. This is
2998 only safe when simplify_subreg can convert MODE constants into integer
2999 constants. At present, it can only do this reliably if the value
3000 fits within a HOST_WIDE_INT. */
3001 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3003 rtx ret = emit_move_via_integer (mode, x, y);
3008 return emit_move_multi_word (mode, x, y);
3011 /* Generate code to copy Y into X.
3012 Both Y and X must have the same mode, except that
3013 Y can be a constant with VOIDmode.
3014 This mode cannot be BLKmode; use emit_block_move for that.
3016 Return the last instruction emitted. */
3019 emit_move_insn (rtx x, rtx y)
3021 enum machine_mode mode = GET_MODE (x);
3022 rtx y_cst = NULL_RTX;
3025 gcc_assert (mode != BLKmode
3026 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3031 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3032 && (last_insn = compress_float_constant (x, y)))
3037 if (!LEGITIMATE_CONSTANT_P (y))
3039 y = force_const_mem (mode, y);
3041 /* If the target's cannot_force_const_mem prevented the spill,
3042 assume that the target's move expanders will also take care
3043 of the non-legitimate constant. */
3049 /* If X or Y are memory references, verify that their addresses are valid
3052 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3053 && ! push_operand (x, GET_MODE (x)))
3055 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3056 x = validize_mem (x);
3059 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3061 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3062 y = validize_mem (y);
3064 gcc_assert (mode != BLKmode);
3066 last_insn = emit_move_insn_1 (x, y);
3068 if (y_cst && REG_P (x)
3069 && (set = single_set (last_insn)) != NULL_RTX
3070 && SET_DEST (set) == x
3071 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3072 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
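/* Illustrative use (not GCC code): the CONST_INT source below has
   VOIDmode, which emit_move_insn accepts because the mode is taken from
   the destination pseudo.  */
#if 0
static void
example_move (void)
{
  rtx r = gen_reg_rtx (SImode);
  emit_move_insn (r, GEN_INT (42));
}
#endif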
3077 /* If Y is representable exactly in a narrower mode, and the target can
3078 perform the extension directly from constant or memory, then emit the
3079 move as an extension. */
3082 compress_float_constant (rtx x, rtx y)
3084 enum machine_mode dstmode = GET_MODE (x);
3085 enum machine_mode orig_srcmode = GET_MODE (y);
3086 enum machine_mode srcmode;
3089 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3091 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3092 srcmode != orig_srcmode;
3093 srcmode = GET_MODE_WIDER_MODE (srcmode))
3096 rtx trunc_y, last_insn;
3098 /* Skip if the target can't extend this way. */
3099 ic = can_extend_p (dstmode, srcmode, 0);
3100 if (ic == CODE_FOR_nothing)
3103 /* Skip if the narrowed value isn't exact. */
3104 if (! exact_real_truncate (srcmode, &r))
3107 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3109 if (LEGITIMATE_CONSTANT_P (trunc_y))
3111 /* Skip if the target needs extra instructions to perform the extension. */
3113 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3116 else if (float_extend_from_mem[dstmode][srcmode])
3117 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3121 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3122 last_insn = get_last_insn ();
3125 set_unique_reg_note (last_insn, REG_EQUAL, y);
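/* A worked example of the loop above (illustrative): the DFmode
   constant 1.0 truncates exactly to SFmode, so on a target whose
   extendsfdf2 pattern accepts the operands, the DFmode load becomes an
   SFmode constant plus a float extension, which is usually cheaper than
   materializing the DFmode constant directly.  */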
3133 /* Pushing data onto the stack. */
3135 /* Push a block of length SIZE (perhaps variable)
3136 and return an rtx to address the beginning of the block.
3137 The value may be virtual_outgoing_args_rtx.
3139 EXTRA is the number of bytes of padding to push in addition to SIZE.
3140 BELOW nonzero means this padding comes at low addresses;
3141 otherwise, the padding comes at high addresses. */
3144 push_block (rtx size, int extra, int below)
3148 size = convert_modes (Pmode, ptr_mode, size, 1);
3149 if (CONSTANT_P (size))
3150 anti_adjust_stack (plus_constant (size, extra));
3151 else if (REG_P (size) && extra == 0)
3152 anti_adjust_stack (size);
3155 temp = copy_to_mode_reg (Pmode, size);
3157 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3158 temp, 0, OPTAB_LIB_WIDEN);
3159 anti_adjust_stack (temp);
3162 #ifndef STACK_GROWS_DOWNWARD
3168 temp = virtual_outgoing_args_rtx;
3169 if (extra != 0 && below)
3170 temp = plus_constant (temp, extra);
3174 if (GET_CODE (size) == CONST_INT)
3175 temp = plus_constant (virtual_outgoing_args_rtx,
3176 -INTVAL (size) - (below ? 0 : extra));
3177 else if (extra != 0 && !below)
3178 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3179 negate_rtx (Pmode, plus_constant (size, extra)));
3181 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3182 negate_rtx (Pmode, size));
3185 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
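/* A worked example of the downward-growing case above (illustrative):
   with a constant SIZE of 16, EXTRA of 4 and BELOW false, the stack is
   adjusted by 20 bytes and the returned address is
   virtual_outgoing_args_rtx - 20, leaving the 4 padding bytes at the
   high end of the block.  */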
3188 #ifdef PUSH_ROUNDING
3190 /* Emit single push insn. */
3193 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3196 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3198 enum insn_code icode;
3199 insn_operand_predicate_fn pred;
3201 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3202 /* If there is a push pattern, use it. Otherwise try the old way of throwing a
3203 MEM representing the push operation to the move expander. */
3204 icode = push_optab->handlers[(int) mode].insn_code;
3205 if (icode != CODE_FOR_nothing)
3207 if (((pred = insn_data[(int) icode].operand[0].predicate)
3208 && !((*pred) (x, mode))))
3209 x = force_reg (mode, x);
3210 emit_insn (GEN_FCN (icode) (x));
3213 if (GET_MODE_SIZE (mode) == rounded_size)
3214 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3215 /* If we are to pad downward, adjust the stack pointer first and
3216 then store X into the stack location using an offset. This is
3217 because emit_move_insn does not know how to pad; it does not have such knowledge. */
3219 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3221 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3222 HOST_WIDE_INT offset;
3224 emit_move_insn (stack_pointer_rtx,
3225 expand_binop (Pmode,
3226 #ifdef STACK_GROWS_DOWNWARD
3232 GEN_INT (rounded_size),
3233 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3235 offset = (HOST_WIDE_INT) padding_size;
3236 #ifdef STACK_GROWS_DOWNWARD
3237 if (STACK_PUSH_CODE == POST_DEC)
3238 /* We have already decremented the stack pointer, so get the previous value. */
3240 offset += (HOST_WIDE_INT) rounded_size;
3242 if (STACK_PUSH_CODE == POST_INC)
3243 /* We have already incremented the stack pointer, so get the previous value. */
3245 offset -= (HOST_WIDE_INT) rounded_size;
3247 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3251 #ifdef STACK_GROWS_DOWNWARD
3252 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3253 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3254 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3256 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3257 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3258 GEN_INT (rounded_size));
3260 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3263 dest = gen_rtx_MEM (mode, dest_addr);
3267 set_mem_attributes (dest, type, 1);
3269 if (flag_optimize_sibling_calls)
3270 /* Function incoming arguments may overlap with sibling call
3271 outgoing arguments and we cannot allow reordering of reads
3272 from function arguments with stores to outgoing arguments
3273 of sibling calls. */
3274 set_mem_alias_set (dest, 0);
3276 emit_move_insn (dest, x);
3280 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3282 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3284 SIZE is an rtx for the size of data to be copied (in bytes),
3285 needed only if X is BLKmode.
3287 ALIGN (in bits) is maximum alignment we can assume.
3289 If PARTIAL and REG are both nonzero, then copy that many of the first
3290 words of X into registers starting with REG, and push the rest of X.
3291 The amount of space pushed is decreased by PARTIAL words,
3292 rounded *down* to a multiple of PARM_BOUNDARY.
3293 REG must be a hard register in this case.
3294 If REG is zero but PARTIAL is not, take all other actions for an
3295 argument partially in registers, but do not actually load any registers.
3298 EXTRA is the amount in bytes of extra space to leave next to this arg.
3299 This is ignored if an argument block has already been allocated.
3301 On a machine that lacks real push insns, ARGS_ADDR is the address of
3302 the bottom of the argument block for this call. We use indexing off there
3303 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3304 argument block has not been preallocated.
3306 ARGS_SO_FAR is the size of args previously pushed for this call.
3308 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3309 for arguments passed in registers. If nonzero, it will be the number
3310 of bytes required. */
3313 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3314 unsigned int align, int partial, rtx reg, int extra,
3315 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3319 enum direction stack_direction
3320 #ifdef STACK_GROWS_DOWNWARD
3326 /* Decide where to pad the argument: `downward' for below,
3327 `upward' for above, or `none' for don't pad it.
3328 Default is below for small data on big-endian machines; else above. */
3329 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3331 /* Invert direction if stack is post-decrement. */
3333 if (STACK_PUSH_CODE == POST_DEC)
3334 if (where_pad != none)
3335 where_pad = (where_pad == downward ? upward : downward);
3339 if (mode == BLKmode)
3341 /* Copy a block into the stack, entirely or partially. */
3344 int used = partial * UNITS_PER_WORD;
3348 if (reg && GET_CODE (reg) == PARALLEL)
3350 /* Use the size of the elt to compute offset. */
3351 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3352 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3353 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3356 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3362 /* USED is now the # of bytes we need not copy to the stack
3363 because registers will take care of them. */
3366 xinner = adjust_address (xinner, BLKmode, used);
3368 /* If the partial register-part of the arg counts in its stack size,
3369 skip the part of stack space corresponding to the registers.
3370 Otherwise, start copying to the beginning of the stack space,
3371 by setting SKIP to 0. */
3372 skip = (reg_parm_stack_space == 0) ? 0 : used;
3374 #ifdef PUSH_ROUNDING
3375 /* Do it with several push insns if that doesn't take lots of insns
3376 and if there is no difficulty with push insns that skip bytes
3377 on the stack for alignment purposes. */
3380 && GET_CODE (size) == CONST_INT
3382 && MEM_ALIGN (xinner) >= align
3383 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3384 /* Here we avoid the case of a structure whose weak alignment
3385 forces many pushes of a small amount of data,
3386 and such small pushes do rounding that causes trouble. */
3387 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3388 || align >= BIGGEST_ALIGNMENT
3389 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3390 == (align / BITS_PER_UNIT)))
3391 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3393 /* Push padding now if padding above and stack grows down,
3394 or if padding below and stack grows up.
3395 But if space already allocated, this has already been done. */
3396 if (extra && args_addr == 0
3397 && where_pad != none && where_pad != stack_direction)
3398 anti_adjust_stack (GEN_INT (extra));
3400 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3403 #endif /* PUSH_ROUNDING */
3407 /* Otherwise make space on the stack and copy the data
3408 to the address of that space. */
3410 /* Deduct words put into registers from the size we must copy. */
3413 if (GET_CODE (size) == CONST_INT)
3414 size = GEN_INT (INTVAL (size) - used);
3416 size = expand_binop (GET_MODE (size), sub_optab, size,
3417 GEN_INT (used), NULL_RTX, 0,
3421 /* Get the address of the stack space.
3422 In this case, we do not deal with EXTRA separately.
3423 A single stack adjust will do. */
3426 temp = push_block (size, extra, where_pad == downward);
3429 else if (GET_CODE (args_so_far) == CONST_INT)
3430 temp = memory_address (BLKmode,
3431 plus_constant (args_addr,
3432 skip + INTVAL (args_so_far)));
3434 temp = memory_address (BLKmode,
3435 plus_constant (gen_rtx_PLUS (Pmode,
3440 if (!ACCUMULATE_OUTGOING_ARGS)
3442 /* If the source is referenced relative to the stack pointer,
3443 copy it to another register to stabilize it. We do not need
3444 to do this if we know that we won't be changing sp. */
3446 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3447 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3448 temp = copy_to_reg (temp);
3451 target = gen_rtx_MEM (BLKmode, temp);
3453 /* We do *not* set_mem_attributes here, because incoming arguments
3454 may overlap with sibling call outgoing arguments and we cannot
3455 allow reordering of reads from function arguments with stores
3456 to outgoing arguments of sibling calls. We do, however, want
3457 to record the alignment of the stack slot. */
3458 /* ALIGN may well be better aligned than TYPE, e.g. due to
3459 PARM_BOUNDARY. Assume the caller isn't lying. */
3460 set_mem_align (target, align);
3462 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3465 else if (partial > 0)
3467 /* Scalar partly in registers. */
3469 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3472 /* # words of start of argument
3473 that we must make space for but need not store. */
3474 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3475 int args_offset = INTVAL (args_so_far);
3478 /* Push padding now if padding above and stack grows down,
3479 or if padding below and stack grows up.
3480 But if space already allocated, this has already been done. */
3481 if (extra && args_addr == 0
3482 && where_pad != none && where_pad != stack_direction)
3483 anti_adjust_stack (GEN_INT (extra));
3485 /* If we make space by pushing it, we might as well push
3486 the real data. Otherwise, we can leave OFFSET nonzero
3487 and leave the space uninitialized. */
3491 /* Now NOT_STACK gets the number of words that we don't need to
3492 allocate on the stack. */
3493 not_stack = partial - offset;
3495 /* If the partial register-part of the arg counts in its stack size,
3496 skip the part of stack space corresponding to the registers.
3497 Otherwise, start copying to the beginning of the stack space,
3498 by setting SKIP to 0. */
3499 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3501 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3502 x = validize_mem (force_const_mem (mode, x));
3504 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3505 SUBREGs of such registers are not allowed. */
3506 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3507 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3508 x = copy_to_reg (x);
3510 /* Loop over all the words allocated on the stack for this arg. */
3511 /* We can do it by words, because any scalar bigger than a word
3512 has a size a multiple of a word. */
3513 #ifndef PUSH_ARGS_REVERSED
3514 for (i = not_stack; i < size; i++)
3516 for (i = size - 1; i >= not_stack; i--)
3518 if (i >= not_stack + offset)
3519 emit_push_insn (operand_subword_force (x, i, mode),
3520 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3522 GEN_INT (args_offset + ((i - not_stack + skip)
3524 reg_parm_stack_space, alignment_pad);
3531 /* Push padding now if padding above and stack grows down,
3532 or if padding below and stack grows up.
3533 But if space already allocated, this has already been done. */
3534 if (extra && args_addr == 0
3535 && where_pad != none && where_pad != stack_direction)
3536 anti_adjust_stack (GEN_INT (extra));
3538 #ifdef PUSH_ROUNDING
3539 if (args_addr == 0 && PUSH_ARGS)
3540 emit_single_push_insn (mode, x, type);
3544 if (GET_CODE (args_so_far) == CONST_INT)
3546 = memory_address (mode,
3547 plus_constant (args_addr,
3548 INTVAL (args_so_far)));
3550 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3552 dest = gen_rtx_MEM (mode, addr);
3554 /* We do *not* set_mem_attributes here, because incoming arguments
3555 may overlap with sibling call outgoing arguments and we cannot
3556 allow reordering of reads from function arguments with stores
3557 to outgoing arguments of sibling calls. We do, however, want
3558 to record the alignment of the stack slot. */
3559 /* ALIGN may well be better aligned than TYPE, e.g. due to
3560 PARM_BOUNDARY. Assume the caller isn't lying. */
3561 set_mem_align (dest, align);
3563 emit_move_insn (dest, x);
3567 /* If part should go in registers, copy that part
3568 into the appropriate registers. Do this now, at the end,
3569 since mem-to-mem copies above may do function calls. */
3570 if (partial > 0 && reg != 0)
3572 /* Handle calls that pass values in multiple non-contiguous locations.
3573 The Irix 6 ABI has examples of this. */
3574 if (GET_CODE (reg) == PARALLEL)
3575 emit_group_load (reg, x, type, -1);
3577 move_block_to_reg (REGNO (reg), x, partial, mode);
3580 if (extra && args_addr == 0 && where_pad == stack_direction)
3581 anti_adjust_stack (GEN_INT (extra));
3583 if (alignment_pad && args_addr == 0)
3584 anti_adjust_stack (alignment_pad);
3587 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3591 get_subtarget (rtx x)
3595 /* Only registers can be subtargets. */
3597 /* Don't use hard regs to avoid extending their life. */
3598 || REGNO (x) < FIRST_PSEUDO_REGISTER
3602 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3603 FIELD is a bitfield. Returns true if the optimization was successful,
3604 and there's nothing else to do. */
3607 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3608 unsigned HOST_WIDE_INT bitpos,
3609 enum machine_mode mode1, rtx str_rtx,
3612 enum machine_mode str_mode = GET_MODE (str_rtx);
3613 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3618 if (mode1 != VOIDmode
3619 || bitsize >= BITS_PER_WORD
3620 || str_bitsize > BITS_PER_WORD
3621 || TREE_SIDE_EFFECTS (to)
3622 || TREE_THIS_VOLATILE (to))
3626 if (!BINARY_CLASS_P (src)
3627 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3630 op0 = TREE_OPERAND (src, 0);
3631 op1 = TREE_OPERAND (src, 1);
3634 if (!operand_equal_p (to, op0, 0))
3637 if (MEM_P (str_rtx))
3639 unsigned HOST_WIDE_INT offset1;
3641 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3642 str_mode = word_mode;
3643 str_mode = get_best_mode (bitsize, bitpos,
3644 MEM_ALIGN (str_rtx), str_mode, 0);
3645 if (str_mode == VOIDmode)
3647 str_bitsize = GET_MODE_BITSIZE (str_mode);
3650 bitpos %= str_bitsize;
3651 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3652 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3654 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3657 /* If the bit field covers the whole REG/MEM, store_field
3658 will likely generate better code. */
3659 if (bitsize >= str_bitsize)
3662 /* We can't handle fields split across multiple entities. */
3663 if (bitpos + bitsize > str_bitsize)
3666 if (BYTES_BIG_ENDIAN)
3667 bitpos = str_bitsize - bitpos - bitsize;
3669 switch (TREE_CODE (src))
3673 /* For now, just optimize the case of the topmost bitfield
3674 where we don't need to do any masking and also
3675 1 bit bitfields where xor can be used.
3676 We might win by one instruction for the other bitfields
3677 too if insv/extv instructions aren't used, so that
3678 can be added later. */
3679 if (bitpos + bitsize != str_bitsize
3680 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3683 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3684 value = convert_modes (str_mode,
3685 TYPE_MODE (TREE_TYPE (op1)), value,
3686 TYPE_UNSIGNED (TREE_TYPE (op1)));
3688 /* We may be accessing data outside the field, which means
3689 we can alias adjacent data. */
3690 if (MEM_P (str_rtx))
3692 str_rtx = shallow_copy_rtx (str_rtx);
3693 set_mem_alias_set (str_rtx, 0);
3694 set_mem_expr (str_rtx, 0);
3697 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3698 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3700 value = expand_and (str_mode, value, const1_rtx, NULL);
3703 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3704 build_int_cst (NULL_TREE, bitpos),
3706 result = expand_binop (str_mode, binop, str_rtx,
3707 value, str_rtx, 1, OPTAB_WIDEN);
3708 if (result != str_rtx)
3709 emit_move_insn (str_rtx, result);
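/* An illustrative example of source code the PLUS_EXPR/MINUS_EXPR case
   above targets, assuming a 32-bit little-endian target:

     struct S { unsigned pad : 29; unsigned top : 3; } s;
     s.top += 1;

   TOP ends at the most significant bit of its 32-bit word, so the
   addition is done directly on the word with the constant shifted into
   place: carries out of the top bit simply fall off, and no masking is
   needed.  */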
3720 /* Expand an assignment that stores the value of FROM into TO. */
3723 expand_assignment (tree to, tree from)
3728 /* Don't crash if the lhs of the assignment was erroneous. */
3730 if (TREE_CODE (to) == ERROR_MARK)
3732 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3736 /* Assignment of a structure component needs special treatment
3737 if the structure component's rtx is not simply a MEM.
3738 Assignment of an array element at a constant index, and assignment of
3739 an array element in an unaligned packed structure field, have the same problem. */
3741 if (handled_component_p (to)
3742 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3744 enum machine_mode mode1;
3745 HOST_WIDE_INT bitsize, bitpos;
3753 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3754 &unsignedp, &volatilep);
3756 /* If we are going to use store_bit_field and extract_bit_field,
3757 make sure to_rtx will be safe for multiple use. */
3759 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3763 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3765 gcc_assert (MEM_P (to_rtx));
3767 #ifdef POINTERS_EXTEND_UNSIGNED
3768 if (GET_MODE (offset_rtx) != Pmode)
3769 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3771 if (GET_MODE (offset_rtx) != ptr_mode)
3772 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3775 /* A constant address in TO_RTX can have VOIDmode; we must not try
3776 to call force_reg in that case, so avoid it. */
3778 && GET_MODE (to_rtx) == BLKmode
3779 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3781 && (bitpos % bitsize) == 0
3782 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3783 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3785 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3789 to_rtx = offset_address (to_rtx, offset_rtx,
3790 highest_pow2_factor_for_target (to,
3794 /* Handle expand_expr of a complex value returning a CONCAT. */
3795 if (GET_CODE (to_rtx) == CONCAT)
3797 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3799 gcc_assert (bitpos == 0);
3800 result = store_expr (from, to_rtx, false);
3804 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3805 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3812 /* If the field is at offset zero, we could have been given the
3813 DECL_RTX of the parent struct. Don't munge it. */
3814 to_rtx = shallow_copy_rtx (to_rtx);
3816 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3818 /* Deal with volatile and readonly fields. The former is only
3819 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3821 MEM_VOLATILE_P (to_rtx) = 1;
3822 if (component_uses_parent_alias_set (to))
3823 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3826 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3830 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3831 TREE_TYPE (tem), get_alias_set (to));
3835 preserve_temp_slots (result);
3841 /* If the rhs is a function call and its value is not an aggregate,
3842 call the function before we start to compute the lhs.
3843 This is needed for correct code for cases such as
3844 val = setjmp (buf) on machines where reference to val
3845 requires loading up part of an address in a separate insn.
3847 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3848 since it might be a promoted variable where the zero- or sign- extension
3849 needs to be done. Handling this in the normal way is safe because no
3850 computation is done before the call. */
3851 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3852 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3853 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3854 && REG_P (DECL_RTL (to))))
3859 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3861 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3863 /* Handle calls that return values in multiple non-contiguous locations.
3864 The Irix 6 ABI has examples of this. */
3865 if (GET_CODE (to_rtx) == PARALLEL)
3866 emit_group_load (to_rtx, value, TREE_TYPE (from),
3867 int_size_in_bytes (TREE_TYPE (from)));
3868 else if (GET_MODE (to_rtx) == BLKmode)
3869 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3872 if (POINTER_TYPE_P (TREE_TYPE (to)))
3873 value = convert_memory_address (GET_MODE (to_rtx), value);
3874 emit_move_insn (to_rtx, value);
3876 preserve_temp_slots (to_rtx);
3882 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3883 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3886 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3888 /* Don't move directly into a return register. */
3889 if (TREE_CODE (to) == RESULT_DECL
3890 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3895 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3897 if (GET_CODE (to_rtx) == PARALLEL)
3898 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3899 int_size_in_bytes (TREE_TYPE (from)));
3901 emit_move_insn (to_rtx, temp);
3903 preserve_temp_slots (to_rtx);
3909 /* In case we are returning the contents of an object which overlaps
3910 the place the value is being stored, use a safe function when copying
3911 a value through a pointer into a structure value return block. */
3912 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3913 && current_function_returns_struct
3914 && !current_function_returns_pcc_struct)
3919 size = expr_size (from);
3920 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3922 emit_library_call (memmove_libfunc, LCT_NORMAL,
3923 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3924 XEXP (from_rtx, 0), Pmode,
3925 convert_to_mode (TYPE_MODE (sizetype),
3926 size, TYPE_UNSIGNED (sizetype)),
3927 TYPE_MODE (sizetype));
3929 preserve_temp_slots (to_rtx);
3935 /* Compute FROM and store the value in the rtx we got. */
3938 result = store_expr (from, to_rtx, 0);
3939 preserve_temp_slots (result);
3945 /* Generate code for computing expression EXP,
3946 and storing the value into TARGET.
3948 If the mode is BLKmode then we may return TARGET itself.
3949 It turns out that in BLKmode it doesn't cause a problem,
3950 because C has no operators that could combine two different
3951 assignments into the same BLKmode object with different values
3952 with no sequence point. Will other languages need this to be more thorough?
3955 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3956 stack, and block moves may need to be treated specially. */
3959 store_expr (tree exp, rtx target, int call_param_p)
3962 rtx alt_rtl = NULL_RTX;
3963 int dont_return_target = 0;
3965 if (VOID_TYPE_P (TREE_TYPE (exp)))
3967 /* C++ can generate ?: expressions with a throw expression in one
3968 branch and an rvalue in the other. Here, we resolve attempts to
3969 store the throw expression's nonexistent result. */
3970 gcc_assert (!call_param_p);
3971 expand_expr (exp, const0_rtx, VOIDmode, 0);
3974 if (TREE_CODE (exp) == COMPOUND_EXPR)
3976 /* Perform the first part of the compound expression, then assign from the second part. */
3978 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3979 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3980 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3982 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3984 /* For a conditional expression, get a safe form of the target. Then
3985 test the condition, doing the appropriate assignment on either
3986 side. This avoids the creation of unnecessary temporaries.
3987 For non-BLKmode, it is more efficient not to do this. */
3989 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3991 do_pending_stack_adjust ();
3993 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3994 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3995 emit_jump_insn (gen_jump (lab2));
3998 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
4004 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4005 /* If this is a scalar in a register that is stored in a wider mode
4006 than the declared mode, compute the result into its declared mode
4007 and then convert to the wider mode. Our value is the computed expression. */
4010 rtx inner_target = 0;
4012 /* We can do the conversion inside EXP, which will often result
4013 in some optimizations. Do the conversion in two steps: first
4014 change the signedness, if needed, then the extend. But don't
4015 do this if the type of EXP is a subtype of something else
4016 since then the conversion might involve more than just
4017 converting modes. */
4018 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4019 && TREE_TYPE (TREE_TYPE (exp)) == 0
4020 && (!lang_hooks.reduce_bit_field_operations
4021 || (GET_MODE_PRECISION (GET_MODE (target))
4022 == TYPE_PRECISION (TREE_TYPE (exp)))))
4024 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4025 != SUBREG_PROMOTED_UNSIGNED_P (target))
4027 (lang_hooks.types.signed_or_unsigned_type
4028 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4030 exp = convert (lang_hooks.types.type_for_mode
4031 (GET_MODE (SUBREG_REG (target)),
4032 SUBREG_PROMOTED_UNSIGNED_P (target)),
4035 inner_target = SUBREG_REG (target);
4038 temp = expand_expr (exp, inner_target, VOIDmode,
4039 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4041 /* If TEMP is a VOIDmode constant, use convert_modes to make
4042 sure that we properly convert it. */
4043 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4045 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4046 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4047 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4048 GET_MODE (target), temp,
4049 SUBREG_PROMOTED_UNSIGNED_P (target));
4052 convert_move (SUBREG_REG (target), temp,
4053 SUBREG_PROMOTED_UNSIGNED_P (target));
4059 temp = expand_expr_real (exp, target, GET_MODE (target),
4061 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4063 /* Return TARGET if it's a specified hardware register.
4064 If TARGET is a volatile mem ref, either return TARGET
4065 or return a reg copied *from* TARGET; ANSI requires this.
4067 Otherwise, if TEMP is not TARGET, return TEMP
4068 if it is constant (for efficiency),
4069 or if we really want the correct value. */
4070 if (!(target && REG_P (target)
4071 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4072 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4073 && ! rtx_equal_p (temp, target)
4074 && CONSTANT_P (temp))
4075 dont_return_target = 1;
4078 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4079 the same as that of TARGET, adjust the constant. This is needed, for
4080 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4082 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4083 && TREE_CODE (exp) != ERROR_MARK
4084 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4085 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4086 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4088 /* If value was not generated in the target, store it there.
4089 Convert the value to TARGET's type first if necessary and emit the
4090 pending incrementations that have been queued when expanding EXP.
4091 Note that we cannot emit the whole queue blindly because this will
4092 effectively disable the POST_INC optimization later.
4094 If TEMP and TARGET compare equal according to rtx_equal_p, but
4095 one or both of them are volatile memory refs, we have to distinguish
4097 - expand_expr has used TARGET. In this case, we must not generate
4098 another copy. This can be detected by TARGET being equal according to ==.
4100 - expand_expr has not used TARGET - that means that the source just
4101 happens to have the same RTX form. Since temp will have been created
4102 by expand_expr, it will compare unequal according to == .
4103 We must generate a copy in this case, to reach the correct number
4104 of volatile memory references. */
4106 if ((! rtx_equal_p (temp, target)
4107 || (temp != target && (side_effects_p (temp)
4108 || side_effects_p (target))))
4109 && TREE_CODE (exp) != ERROR_MARK
4110 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4111 but TARGET is not a valid memory reference, TEMP will differ
4112 from TARGET although it is really the same location. */
4113 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4114 /* If there's nothing to copy, don't bother. Don't call expr_size
4115 unless necessary, because some front-ends (C++) expr_size-hook
4116 aborts on objects that are not supposed to be bit-copied or
4117 can be incomplete.  */
4118 && expr_size (exp) != const0_rtx)
4120 if (GET_MODE (temp) != GET_MODE (target)
4121 && GET_MODE (temp) != VOIDmode)
4123 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4124 if (dont_return_target)
4126 /* In this case, we will return TEMP,
4127 so make sure it has the proper mode.
4128 But don't forget to store the value into TARGET. */
4129 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4130 emit_move_insn (target, temp);
4133 convert_move (target, temp, unsignedp);
4136 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4138 /* Handle copying a string constant into an array. The string
4139 constant may be shorter than the array. So copy just the string's
4140 actual length, and clear the rest. First get the size of the data
4141 type of the string, which is actually the size of the target. */
4142 rtx size = expr_size (exp);
4144 if (GET_CODE (size) == CONST_INT
4145 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4146 emit_block_move (target, temp, size,
4148 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4151 /* Compute the size of the data to copy from the string. */
4153 = size_binop (MIN_EXPR,
4154 make_tree (sizetype, size),
4155 size_int (TREE_STRING_LENGTH (exp)));
4157 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4159 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4162 /* Copy that much. */
4163 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4164 TYPE_UNSIGNED (sizetype));
4165 emit_block_move (target, temp, copy_size_rtx,
4167 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4169 /* Figure out how much is left in TARGET that we have to clear.
4170 Do all calculations in ptr_mode. */
4171 if (GET_CODE (copy_size_rtx) == CONST_INT)
4173 size = plus_constant (size, -INTVAL (copy_size_rtx));
4174 target = adjust_address (target, BLKmode,
4175 INTVAL (copy_size_rtx));
4179 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4180 copy_size_rtx, NULL_RTX, 0,
4183 #ifdef POINTERS_EXTEND_UNSIGNED
4184 if (GET_MODE (copy_size_rtx) != Pmode)
4185 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4186 TYPE_UNSIGNED (sizetype));
4189 target = offset_address (target, copy_size_rtx,
4190 highest_pow2_factor (copy_size));
4191 label = gen_label_rtx ();
4192 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4193 GET_MODE (size), 0, label);
4196 if (size != const0_rtx)
4197 clear_storage (target, size);
4203 /* Handle calls that return values in multiple non-contiguous locations.
4204 The Irix 6 ABI has examples of this. */
4205 else if (GET_CODE (target) == PARALLEL)
4206 emit_group_load (target, temp, TREE_TYPE (exp),
4207 int_size_in_bytes (TREE_TYPE (exp)));
4208 else if (GET_MODE (temp) == BLKmode)
4209 emit_block_move (target, temp, expr_size (exp),
4211 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4214 temp = force_operand (temp, target);
4216 emit_move_insn (target, temp);
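/* Illustrative sketch, not part of GCC: for "char buf[8] = "hi";" the
   STRING_CST branch above copies the TREE_STRING_LENGTH bytes (3 here,
   counting the terminating nul) and then clears the remaining 5.  The
   concrete sizes and the TARGET/TEMP operands are assumptions made for
   this example only.  */
#if 0
  /* Copy the constant bytes of the string.  */
  emit_block_move (target, temp, GEN_INT (3), BLOCK_OP_NORMAL);
  /* Step past what was copied and zero the tail of the array.  */
  target = adjust_address (target, BLKmode, 3);
  clear_storage (target, GEN_INT (5));
#endif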
4223 /* Examine CTOR to discover:
4224 * how many scalar fields are set to nonzero values,
4225 and place it in *P_NZ_ELTS;
4226 * how many scalar fields are set to non-constant values,
4227 and place it in *P_NC_ELTS; and
4228 * how many scalar fields in total are in CTOR,
4229 and place it in *P_ELT_COUNT. */
4232 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4233 HOST_WIDE_INT *p_nc_elts,
4234 HOST_WIDE_INT *p_elt_count)
4236 HOST_WIDE_INT nz_elts, nc_elts, elt_count;
4243 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4245 tree value = TREE_VALUE (list);
4246 tree purpose = TREE_PURPOSE (list);
4250 if (TREE_CODE (purpose) == RANGE_EXPR)
4252 tree lo_index = TREE_OPERAND (purpose, 0);
4253 tree hi_index = TREE_OPERAND (purpose, 1);
4255 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4256 mult = (tree_low_cst (hi_index, 1)
4257 - tree_low_cst (lo_index, 1) + 1);
4260 switch (TREE_CODE (value))
4264 HOST_WIDE_INT nz = 0, nc = 0, count = 0;
4265 categorize_ctor_elements_1 (value, &nz, &nc, &count);
4266 nz_elts += mult * nz;
4267 nc_elts += mult * nc;
4268 elt_count += mult * count;
4274 if (!initializer_zerop (value))
4280 nz_elts += mult * TREE_STRING_LENGTH (value);
4281 elt_count += mult * TREE_STRING_LENGTH (value);
4285 if (!initializer_zerop (TREE_REALPART (value)))
4287 if (!initializer_zerop (TREE_IMAGPART (value)))
4295 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4297 if (!initializer_zerop (TREE_VALUE (v)))
4307 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4313 *p_nz_elts += nz_elts;
4314 *p_nc_elts += nc_elts;
4315 *p_elt_count += elt_count;
4319 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4320 HOST_WIDE_INT *p_nc_elts,
4321 HOST_WIDE_INT *p_elt_count)
4326 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts, p_elt_count);
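/* Illustrative sketch, not part of GCC: for an initializer such as
   "struct S { int a, b, c, d; } s = { 1, 0, 0, n };" with N not a
   constant, the walk above finds two nonzero scalars (1 and N), one
   non-constant scalar (N), and four scalars in total.  CTOR stands for
   the CONSTRUCTOR tree of that initializer.  */
#if 0
  HOST_WIDE_INT nz, nc, count;
  categorize_ctor_elements (ctor, &nz, &nc, &count);
  /* Now nz == 2, nc == 1, count == 4 for the example above.  */
#endif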
4329 /* Count the number of scalars in TYPE. Return -1 on overflow or
4333 count_type_elements (tree type)
4335 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4336 switch (TREE_CODE (type))
4340 tree telts = array_type_nelts (type);
4341 if (telts && host_integerp (telts, 1))
4343 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4344 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4347 else if (max / n > m)
4355 HOST_WIDE_INT n = 0, t;
4358 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4359 if (TREE_CODE (f) == FIELD_DECL)
4361 t = count_type_elements (TREE_TYPE (f));
4371 case QUAL_UNION_TYPE:
4373 /* Ho hum. How in the world do we guess here? Clearly it isn't
4374 right to count the fields. Guess based on the number of words. */
4375 HOST_WIDE_INT n = int_size_in_bytes (type);
4378 return n / UNITS_PER_WORD;
4385 return TYPE_VECTOR_SUBPARTS (type);
4394 case REFERENCE_TYPE:
4407 /* Return 1 if EXP contains mostly (3/4) zeros. */
4410 mostly_zeros_p (tree exp)
4412 if (TREE_CODE (exp) == CONSTRUCTOR)
4415 HOST_WIDE_INT nz_elts, nc_elts, count, elts;
4417 categorize_ctor_elements (exp, &nz_elts, &nc_elts, &count);
4418 elts = count_type_elements (TREE_TYPE (exp));
4420 return nz_elts < elts / 4;
4423 return initializer_zerop (exp);
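/* Illustrative sketch, not part of GCC: with the 3/4 threshold above,
   "int v[8] = { 7 };" has one nonzero scalar out of the eight counted
   by count_type_elements, and 1 < 8 / 4, so mostly_zeros_p returns
   nonzero; callers then clear the whole object and store only the 7.
   CTOR is assumed to be the CONSTRUCTOR for that initializer.  */
#if 0
  if (mostly_zeros_p (ctor))
    /* Prefer one block clear plus a handful of element stores.  */;
#endif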
4426 /* Helper function for store_constructor.
4427 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4428 TYPE is the type of the CONSTRUCTOR, not the element type.
4429 CLEARED is as for store_constructor.
4430 ALIAS_SET is the alias set to use for any stores.
4432 This provides a recursive shortcut back to store_constructor when it isn't
4433 necessary to go through store_field. This is so that we can pass through
4434 the cleared field to let store_constructor know that we may not have to
4435 clear a substructure if the outer structure has already been cleared. */
4438 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4439 HOST_WIDE_INT bitpos, enum machine_mode mode,
4440 tree exp, tree type, int cleared, int alias_set)
4442 if (TREE_CODE (exp) == CONSTRUCTOR
4443 /* We can only call store_constructor recursively if the size and
4444 bit position are on a byte boundary. */
4445 && bitpos % BITS_PER_UNIT == 0
4446 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4447 /* If we have a nonzero bitpos for a register target, then we just
4448 let store_field do the bitfield handling. This is unlikely to
4449 generate unnecessary clear instructions anyways. */
4450 && (bitpos == 0 || MEM_P (target)))
4454 = adjust_address (target,
4455 GET_MODE (target) == BLKmode
4457 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4458 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4461 /* Update the alias set, if required. */
4462 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4463 && MEM_ALIAS_SET (target) != 0)
4465 target = copy_rtx (target);
4466 set_mem_alias_set (target, alias_set);
4469 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4472 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
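/* Illustrative sketch, not part of GCC: the byte-boundary test above in
   action.  A 32-bit element at bit offset 64 of a MEM recurses into
   store_constructor directly; the same element at bit offset 3 falls
   through to store_field's bit-field handling.  All arguments here are
   assumptions made for the example.  */
#if 0
  store_constructor_field (target, 32, 64, SImode, value, type,
			   cleared, alias_set);	/* recursive shortcut */
  store_constructor_field (target, 32, 3, SImode, value, type,
			   cleared, alias_set);	/* goes via store_field */
#endif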
4475 /* Store the value of constructor EXP into the rtx TARGET.
4476 TARGET is either a REG or a MEM; we know it cannot conflict, since
4477 safe_from_p has been called.
4478 CLEARED is true if TARGET is known to have been zero'd.
4479 SIZE is the number of bytes of TARGET we are allowed to modify: this
4480 may not be the same as the size of EXP if we are assigning to a field
4481 which has been packed to exclude padding bits. */
4484 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4486 tree type = TREE_TYPE (exp);
4487 #ifdef WORD_REGISTER_OPERATIONS
4488 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4491 switch (TREE_CODE (type))
4495 case QUAL_UNION_TYPE:
4499 /* If size is zero or the target is already cleared, do nothing. */
4500 if (size == 0 || cleared)
4502 /* We either clear the aggregate or indicate the value is dead. */
4503 else if ((TREE_CODE (type) == UNION_TYPE
4504 || TREE_CODE (type) == QUAL_UNION_TYPE)
4505 && ! CONSTRUCTOR_ELTS (exp))
4506 /* If the constructor is empty, clear the union. */
4508 clear_storage (target, expr_size (exp));
4512 /* If we are building a static constructor into a register,
4513 set the initial value as zero so we can fold the value into
4514 a constant. But if more than one register is involved,
4515 this probably loses. */
4516 else if (REG_P (target) && TREE_STATIC (exp)
4517 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4519 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4523 /* If the constructor has fewer fields than the structure or
4524 if we are initializing the structure to mostly zeros, clear
4525 the whole structure first. Don't do this if TARGET is a
4526 register whose mode size isn't equal to SIZE since
4527 clear_storage can't handle this case. */
4529 && ((list_length (CONSTRUCTOR_ELTS (exp))
4530 != fields_length (type))
4531 || mostly_zeros_p (exp))
4533 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4536 clear_storage (target, GEN_INT (size));
4541 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4543 /* Store each element of the constructor into the
4544 corresponding field of TARGET. */
4546 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4548 tree field = TREE_PURPOSE (elt);
4549 tree value = TREE_VALUE (elt);
4550 enum machine_mode mode;
4551 HOST_WIDE_INT bitsize;
4552 HOST_WIDE_INT bitpos = 0;
4554 rtx to_rtx = target;
4556 /* Just ignore missing fields. We cleared the whole
4557 structure, above, if any fields are missing. */
4561 if (cleared && initializer_zerop (value))
4564 if (host_integerp (DECL_SIZE (field), 1))
4565 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4569 mode = DECL_MODE (field);
4570 if (DECL_BIT_FIELD (field))
4573 offset = DECL_FIELD_OFFSET (field);
4574 if (host_integerp (offset, 0)
4575 && host_integerp (bit_position (field), 0))
4577 bitpos = int_bit_position (field);
4581 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4588 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4589 make_tree (TREE_TYPE (exp),
4592 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4593 gcc_assert (MEM_P (to_rtx));
4595 #ifdef POINTERS_EXTEND_UNSIGNED
4596 if (GET_MODE (offset_rtx) != Pmode)
4597 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4599 if (GET_MODE (offset_rtx) != ptr_mode)
4600 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4603 to_rtx = offset_address (to_rtx, offset_rtx,
4604 highest_pow2_factor (offset));
4607 #ifdef WORD_REGISTER_OPERATIONS
4608 /* If this initializes a field that is smaller than a
4609 word, at the start of a word, try to widen it to a full
4610 word. This special case allows us to output C++ member
4611 function initializations in a form that the optimizers
4612 can understand.  */
4614 && bitsize < BITS_PER_WORD
4615 && bitpos % BITS_PER_WORD == 0
4616 && GET_MODE_CLASS (mode) == MODE_INT
4617 && TREE_CODE (value) == INTEGER_CST
4619 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4621 tree type = TREE_TYPE (value);
4623 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4625 type = lang_hooks.types.type_for_size
4626 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4627 value = convert (type, value);
4630 if (BYTES_BIG_ENDIAN)
4632 = fold (build2 (LSHIFT_EXPR, type, value,
4633 build_int_cst (NULL_TREE,
4634 BITS_PER_WORD - bitsize)));
4635 bitsize = BITS_PER_WORD;
4640 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4641 && DECL_NONADDRESSABLE_P (field))
4643 to_rtx = copy_rtx (to_rtx);
4644 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4647 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4648 value, type, cleared,
4649 get_alias_set (TREE_TYPE (field)));
4659 tree elttype = TREE_TYPE (type);
4661 HOST_WIDE_INT minelt = 0;
4662 HOST_WIDE_INT maxelt = 0;
4664 domain = TYPE_DOMAIN (type);
4665 const_bounds_p = (TYPE_MIN_VALUE (domain)
4666 && TYPE_MAX_VALUE (domain)
4667 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4668 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4670 /* If we have constant bounds for the range of the type, get them. */
4673 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4674 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4677 /* If the constructor has fewer elements than the array, clear
4678 the whole array first. Similarly if this is a static
4679 constructor of a non-BLKmode object. */
4682 else if (REG_P (target) && TREE_STATIC (exp))
4686 HOST_WIDE_INT count = 0, zero_count = 0;
4687 need_to_clear = ! const_bounds_p;
4689 /* This loop is a more accurate version of the loop in
4690 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4691 is also needed to check for missing elements. */
4692 for (elt = CONSTRUCTOR_ELTS (exp);
4693 elt != NULL_TREE && ! need_to_clear;
4694 elt = TREE_CHAIN (elt))
4696 tree index = TREE_PURPOSE (elt);
4697 HOST_WIDE_INT this_node_count;
4699 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4701 tree lo_index = TREE_OPERAND (index, 0);
4702 tree hi_index = TREE_OPERAND (index, 1);
4704 if (! host_integerp (lo_index, 1)
4705 || ! host_integerp (hi_index, 1))
4711 this_node_count = (tree_low_cst (hi_index, 1)
4712 - tree_low_cst (lo_index, 1) + 1);
4715 this_node_count = 1;
4717 count += this_node_count;
4718 if (mostly_zeros_p (TREE_VALUE (elt)))
4719 zero_count += this_node_count;
4722 /* Clear the entire array first if there are any missing
4723 elements, or if the incidence of zero elements is >=
4724 75%.  */
4726 && (count < maxelt - minelt + 1
4727 || 4 * zero_count >= 3 * count))
4731 if (need_to_clear && size > 0)
4734 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4736 clear_storage (target, GEN_INT (size));
4740 if (!cleared && REG_P (target))
4741 /* Inform later passes that the old value is dead. */
4742 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4744 /* Store each element of the constructor into the
4745 corresponding element of TARGET, determined by counting the
4747 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4749 elt = TREE_CHAIN (elt), i++)
4751 enum machine_mode mode;
4752 HOST_WIDE_INT bitsize;
4753 HOST_WIDE_INT bitpos;
4755 tree value = TREE_VALUE (elt);
4756 tree index = TREE_PURPOSE (elt);
4757 rtx xtarget = target;
4759 if (cleared && initializer_zerop (value))
4762 unsignedp = TYPE_UNSIGNED (elttype);
4763 mode = TYPE_MODE (elttype);
4764 if (mode == BLKmode)
4765 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4766 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4769 bitsize = GET_MODE_BITSIZE (mode);
4771 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4773 tree lo_index = TREE_OPERAND (index, 0);
4774 tree hi_index = TREE_OPERAND (index, 1);
4775 rtx index_r, pos_rtx;
4776 HOST_WIDE_INT lo, hi, count;
4779 /* If the range is constant and "small", unroll the loop. */
4781 && host_integerp (lo_index, 0)
4782 && host_integerp (hi_index, 0)
4783 && (lo = tree_low_cst (lo_index, 0),
4784 hi = tree_low_cst (hi_index, 0),
4785 count = hi - lo + 1,
4788 || (host_integerp (TYPE_SIZE (elttype), 1)
4789 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4792 lo -= minelt; hi -= minelt;
4793 for (; lo <= hi; lo++)
4795 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4798 && !MEM_KEEP_ALIAS_SET_P (target)
4799 && TREE_CODE (type) == ARRAY_TYPE
4800 && TYPE_NONALIASED_COMPONENT (type))
4802 target = copy_rtx (target);
4803 MEM_KEEP_ALIAS_SET_P (target) = 1;
4806 store_constructor_field
4807 (target, bitsize, bitpos, mode, value, type, cleared,
4808 get_alias_set (elttype));
4813 rtx loop_start = gen_label_rtx ();
4814 rtx loop_end = gen_label_rtx ();
4817 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4818 unsignedp = TYPE_UNSIGNED (domain);
4820 index = build_decl (VAR_DECL, NULL_TREE, domain);
4823 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4825 SET_DECL_RTL (index, index_r);
4826 store_expr (lo_index, index_r, 0);
4828 /* Build the head of the loop. */
4829 do_pending_stack_adjust ();
4830 emit_label (loop_start);
4832 /* Assign value to element index. */
4834 = convert (ssizetype,
4835 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4836 index, TYPE_MIN_VALUE (domain))));
4837 position = size_binop (MULT_EXPR, position,
4839 TYPE_SIZE_UNIT (elttype)));
4841 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4842 xtarget = offset_address (target, pos_rtx,
4843 highest_pow2_factor (position));
4844 xtarget = adjust_address (xtarget, mode, 0);
4845 if (TREE_CODE (value) == CONSTRUCTOR)
4846 store_constructor (value, xtarget, cleared,
4847 bitsize / BITS_PER_UNIT);
4849 store_expr (value, xtarget, 0);
4851 /* Generate a conditional jump to exit the loop. */
4852 exit_cond = build2 (LT_EXPR, integer_type_node,
4854 jumpif (exit_cond, loop_end);
4856 /* Update the loop counter, and jump to the head of
4857 the loop.  */
4858 expand_assignment (index,
4859 build2 (PLUS_EXPR, TREE_TYPE (index),
4860 index, integer_one_node));
4862 emit_jump (loop_start);
4864 /* Build the end of the loop. */
4865 emit_label (loop_end);
4868 else if ((index != 0 && ! host_integerp (index, 0))
4869 || ! host_integerp (TYPE_SIZE (elttype), 1))
4874 index = ssize_int (1);
4877 index = fold_convert (ssizetype,
4878 fold (build2 (MINUS_EXPR,
4881 TYPE_MIN_VALUE (domain))));
4883 position = size_binop (MULT_EXPR, index,
4885 TYPE_SIZE_UNIT (elttype)));
4886 xtarget = offset_address (target,
4887 expand_expr (position, 0, VOIDmode, 0),
4888 highest_pow2_factor (position));
4889 xtarget = adjust_address (xtarget, mode, 0);
4890 store_expr (value, xtarget, 0);
4895 bitpos = ((tree_low_cst (index, 0) - minelt)
4896 * tree_low_cst (TYPE_SIZE (elttype), 1));
4898 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4900 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4901 && TREE_CODE (type) == ARRAY_TYPE
4902 && TYPE_NONALIASED_COMPONENT (type))
4904 target = copy_rtx (target);
4905 MEM_KEEP_ALIAS_SET_P (target) = 1;
4907 store_constructor_field (target, bitsize, bitpos, mode, value,
4908 type, cleared, get_alias_set (elttype));
4920 tree elttype = TREE_TYPE (type);
4921 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4922 enum machine_mode eltmode = TYPE_MODE (elttype);
4923 HOST_WIDE_INT bitsize;
4924 HOST_WIDE_INT bitpos;
4928 gcc_assert (eltmode != BLKmode);
4930 n_elts = TYPE_VECTOR_SUBPARTS (type);
4931 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4933 enum machine_mode mode = GET_MODE (target);
4935 icode = (int) vec_init_optab->handlers[mode].insn_code;
4936 if (icode != CODE_FOR_nothing)
4940 vector = alloca (n_elts * sizeof (rtx));
4941 for (i = 0; i < n_elts; i++)
4942 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4946 /* If the constructor has fewer elements than the vector,
4947 clear the whole array first. Similarly if this is a static
4948 constructor of a non-BLKmode object. */
4951 else if (REG_P (target) && TREE_STATIC (exp))
4955 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4957 for (elt = CONSTRUCTOR_ELTS (exp);
4959 elt = TREE_CHAIN (elt))
4961 int n_elts_here = tree_low_cst
4962 (int_const_binop (TRUNC_DIV_EXPR,
4963 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4964 TYPE_SIZE (elttype), 0), 1);
4966 count += n_elts_here;
4967 if (mostly_zeros_p (TREE_VALUE (elt)))
4968 zero_count += n_elts_here;
4971 /* Clear the entire vector first if there are any missing elements,
4972 or if the incidence of zero elements is >= 75%. */
4973 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4976 if (need_to_clear && size > 0 && !vector)
4979 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4981 clear_storage (target, GEN_INT (size));
4985 if (!cleared && REG_P (target))
4986 /* Inform later passes that the old value is dead. */
4987 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4989 /* Store each element of the constructor into the corresponding
4990 element of TARGET, determined by counting the elements. */
4991 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4993 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4995 tree value = TREE_VALUE (elt);
4996 tree index = TREE_PURPOSE (elt);
4997 HOST_WIDE_INT eltpos;
4999 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5000 if (cleared && initializer_zerop (value))
5004 eltpos = tree_low_cst (index, 1);
5010 /* Vector CONSTRUCTORs should only be built from smaller
5011 vectors in the case of BLKmode vectors. */
5012 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5013 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5017 enum machine_mode value_mode =
5018 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5019 ? TYPE_MODE (TREE_TYPE (value))
5021 bitpos = eltpos * elt_size;
5022 store_constructor_field (target, bitsize, bitpos,
5023 value_mode, value, type,
5024 cleared, get_alias_set (elttype));
5029 emit_insn (GEN_FCN (icode)
5031 gen_rtx_PARALLEL (GET_MODE (target),
5032 gen_rtvec_v (n_elts, vector))));
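/* Illustrative sketch, not part of GCC: the clear-first strategy used by
   every branch of store_constructor means that an initializer such as
   "int a[100] = { [3] = 7 };" becomes one clear_storage of the whole
   object followed by a single element store, instead of one hundred
   stores.  The 400-byte size and the operands are assumptions.  */
#if 0
  clear_storage (target, GEN_INT (400));
  store_constructor_field (target, 32, 3 * 32, SImode, value, type,
			   /*cleared=*/1, alias_set);
#endif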
5041 /* Store the value of EXP (an expression tree)
5042 into a subfield of TARGET which has mode MODE and occupies
5043 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5044 If MODE is VOIDmode, it means that we are storing into a bit-field.
5046 Always return const0_rtx unless we have something particular to
5047 return.
5049 TYPE is the type of the underlying object,
5051 ALIAS_SET is the alias set for the destination. This value will
5052 (in general) be different from that for TARGET, since TARGET is a
5053 reference to the containing structure. */
5056 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5057 enum machine_mode mode, tree exp, tree type, int alias_set)
5059 HOST_WIDE_INT width_mask = 0;
5061 if (TREE_CODE (exp) == ERROR_MARK)
5064 /* If we have nothing to store, do nothing unless the expression has
5065 side-effects.  */
5066 if (bitsize == 0)
5067 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5068 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5069 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5071 /* If we are storing into an unaligned field of an aligned union that is
5072 in a register, we may have the mode of TARGET being an integer mode but
5073 MODE == BLKmode. In that case, get an aligned object whose size and
5074 alignment are the same as TARGET and store TARGET into it (we can avoid
5075 the store if the field being stored is the entire width of TARGET). Then
5076 call ourselves recursively to store the field into a BLKmode version of
5077 that object. Finally, load from the object into TARGET. This is not
5078 very efficient in general, but should only be slightly more expensive
5079 than the otherwise-required unaligned accesses. Perhaps this can be
5080 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5081 twice, once with emit_move_insn and once via store_field. */
5084 && (REG_P (target) || GET_CODE (target) == SUBREG))
5086 rtx object = assign_temp (type, 0, 1, 1);
5087 rtx blk_object = adjust_address (object, BLKmode, 0);
5089 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5090 emit_move_insn (object, target);
5092 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5094 emit_move_insn (target, object);
5096 /* We want to return the BLKmode version of the data. */
5100 if (GET_CODE (target) == CONCAT)
5102 /* We're storing into a struct containing a single __complex. */
5104 gcc_assert (!bitpos);
5105 return store_expr (exp, target, 0);
5108 /* If the structure is in a register or if the component
5109 is a bit field, we cannot use addressing to access it.
5110 Use bit-field techniques or SUBREG to store in it. */
5112 if (mode == VOIDmode
5113 || (mode != BLKmode && ! direct_store[(int) mode]
5114 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5115 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5117 || GET_CODE (target) == SUBREG
5118 /* If the field isn't aligned enough to store as an ordinary memref,
5119 store it as a bit field. */
5121 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5122 || bitpos % GET_MODE_ALIGNMENT (mode))
5123 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5124 || (bitpos % BITS_PER_UNIT != 0)))
5125 /* If the RHS and field are a constant size and the size of the
5126 RHS isn't the same size as the bitfield, we must use bitfield
5129 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5130 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5132 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5134 /* If BITSIZE is narrower than the size of the type of EXP
5135 we will be narrowing TEMP. Normally, what's wanted are the
5136 low-order bits. However, if EXP's type is a record and this is
5137 a big-endian machine, we want the upper BITSIZE bits. */
5138 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5139 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5140 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5141 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5142 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5146 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5147 MODE.  */
5148 if (mode != VOIDmode && mode != BLKmode
5149 && mode != TYPE_MODE (TREE_TYPE (exp)))
5150 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5152 /* If the modes of TARGET and TEMP are both BLKmode, both
5153 must be in memory and BITPOS must be aligned on a byte
5154 boundary. If so, we simply do a block copy. */
5155 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5157 gcc_assert (MEM_P (target) && MEM_P (temp)
5158 && !(bitpos % BITS_PER_UNIT));
5160 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5161 emit_block_move (target, temp,
5162 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5169 /* Store the value in the bitfield. */
5170 store_bit_field (target, bitsize, bitpos, mode, temp);
5176 /* Now build a reference to just the desired component. */
5177 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5179 if (to_rtx == target)
5180 to_rtx = copy_rtx (to_rtx);
5182 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5183 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5184 set_mem_alias_set (to_rtx, alias_set);
5186 return store_expr (exp, to_rtx, 0);
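/* Illustrative sketch, not part of GCC: two typical store_field calls.
   A well-aligned field with a usable mode takes the adjust_address +
   store_expr path just above; a 5-bit field at bit position 3 satisfies
   the bit-field tests instead and ends up in store_bit_field.  The
   arguments are assumptions made for the example.  */
#if 0
  store_field (target, 32, 32, SImode, exp, type, alias_set); /* direct */
  store_field (target, 5, 3, VOIDmode, exp, type, alias_set); /* bit-field */
#endif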
5190 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5191 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5192 codes and find the ultimate containing object, which we return.
5194 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5195 bit position, and *PUNSIGNEDP to the signedness of the field.
5196 If the position of the field is variable, we store a tree
5197 giving the variable offset (in units) in *POFFSET.
5198 This offset is in addition to the bit position.
5199 If the position is not variable, we store 0 in *POFFSET.
5201 If any of the extraction expressions is volatile,
5202 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5204 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5205 is a mode that can be used to access the field. In that case, *PBITSIZE
5206 is redundant.
5208 If the field describes a variable-sized object, *PMODE is set to
5209 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5210 this case, but the address of the object can be found. */
5213 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5214 HOST_WIDE_INT *pbitpos, tree *poffset,
5215 enum machine_mode *pmode, int *punsignedp,
5219 enum machine_mode mode = VOIDmode;
5220 tree offset = size_zero_node;
5221 tree bit_offset = bitsize_zero_node;
5224 /* First get the mode, signedness, and size. We do this from just the
5225 outermost expression. */
5226 if (TREE_CODE (exp) == COMPONENT_REF)
5228 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5229 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5230 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5232 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5234 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5236 size_tree = TREE_OPERAND (exp, 1);
5237 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5241 mode = TYPE_MODE (TREE_TYPE (exp));
5242 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5244 if (mode == BLKmode)
5245 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5247 *pbitsize = GET_MODE_BITSIZE (mode);
5252 if (! host_integerp (size_tree, 1))
5253 mode = BLKmode, *pbitsize = -1;
5255 *pbitsize = tree_low_cst (size_tree, 1);
5258 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5259 and find the ultimate containing object. */
5262 switch (TREE_CODE (exp))
5265 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5266 TREE_OPERAND (exp, 2));
5271 tree field = TREE_OPERAND (exp, 1);
5272 tree this_offset = component_ref_field_offset (exp);
5274 /* If this field hasn't been filled in yet, don't go past it.
5275 This should only happen when folding expressions made during
5276 type construction. */
5277 if (this_offset == 0)
5280 offset = size_binop (PLUS_EXPR, offset, this_offset);
5281 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5282 DECL_FIELD_BIT_OFFSET (field));
5284 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5289 case ARRAY_RANGE_REF:
5291 tree index = TREE_OPERAND (exp, 1);
5292 tree low_bound = array_ref_low_bound (exp);
5293 tree unit_size = array_ref_element_size (exp);
5295 /* We assume all arrays have sizes that are a multiple of a byte.
5296 First subtract the lower bound, if any, in the type of the
5297 index, then convert to sizetype and multiply by the size of
5298 the array element. */
5299 if (! integer_zerop (low_bound))
5300 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5303 offset = size_binop (PLUS_EXPR, offset,
5304 size_binop (MULT_EXPR,
5305 convert (sizetype, index),
5314 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5315 bitsize_int (*pbitsize));
5318 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5319 conversions that don't change the mode, and all view conversions
5320 except those that need to "step up" the alignment. */
5322 case VIEW_CONVERT_EXPR:
5323 if ((TYPE_ALIGN (TREE_TYPE (exp))
5324 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5326 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5327 < BIGGEST_ALIGNMENT)
5328 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5329 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5337 /* If any reference in the chain is volatile, the effect is volatile. */
5338 if (TREE_THIS_VOLATILE (exp))
5341 exp = TREE_OPERAND (exp, 0);
5345 /* If OFFSET is constant, see if we can return the whole thing as a
5346 constant bit position. Otherwise, split it up. */
5347 if (host_integerp (offset, 0)
5348 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5350 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5351 && host_integerp (tem, 0))
5352 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5354 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
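/* Illustrative sketch, not part of GCC: decomposing a reference with
   get_inner_reference.  For "s.x", with X a 32-bit field at byte offset
   4, the call fills in bitsize == 32, bitpos == 32, a null *POFFSET,
   and returns the tree for S itself.  EXP and the field layout are
   assumptions made for the example.  */
#if 0
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;
  tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				    &mode1, &unsignedp, &volatilep);
#endif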
5360 /* Return a tree of sizetype representing the size, in bytes, of the element
5361 of EXP, an ARRAY_REF. */
5364 array_ref_element_size (tree exp)
5366 tree aligned_size = TREE_OPERAND (exp, 3);
5367 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5369 /* If a size was specified in the ARRAY_REF, it's the size measured
5370 in alignment units of the element type. So multiply by that value. */
5373 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5374 sizetype from another type of the same width and signedness. */
5375 if (TREE_TYPE (aligned_size) != sizetype)
5376 aligned_size = fold_convert (sizetype, aligned_size);
5377 return size_binop (MULT_EXPR, aligned_size,
5378 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5381 /* Otherwise, take the size from that of the element type. Substitute
5382 any PLACEHOLDER_EXPR that we have. */
5384 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5387 /* Return a tree representing the lower bound of the array mentioned in
5388 EXP, an ARRAY_REF. */
5391 array_ref_low_bound (tree exp)
5393 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5395 /* If a lower bound is specified in EXP, use it. */
5396 if (TREE_OPERAND (exp, 2))
5397 return TREE_OPERAND (exp, 2);
5399 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5400 substituting for a PLACEHOLDER_EXPR as needed. */
5401 if (domain_type && TYPE_MIN_VALUE (domain_type))
5402 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5404 /* Otherwise, return a zero of the appropriate type. */
5405 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5408 /* Return a tree representing the upper bound of the array mentioned in
5409 EXP, an ARRAY_REF. */
5412 array_ref_up_bound (tree exp)
5414 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5416 /* If there is a domain type and it has an upper bound, use it, substituting
5417 for a PLACEHOLDER_EXPR as needed. */
5418 if (domain_type && TYPE_MAX_VALUE (domain_type))
5419 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5421 /* Otherwise fail. */
5425 /* Return a tree representing the offset, in bytes, of the field referenced
5426 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5429 component_ref_field_offset (tree exp)
5431 tree aligned_offset = TREE_OPERAND (exp, 2);
5432 tree field = TREE_OPERAND (exp, 1);
5434 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5435 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5436 value.  */
5437 if (aligned_offset)
5439 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5440 sizetype from another type of the same width and signedness. */
5441 if (TREE_TYPE (aligned_offset) != sizetype)
5442 aligned_offset = fold_convert (sizetype, aligned_offset);
5443 return size_binop (MULT_EXPR, aligned_offset,
5444 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5447 /* Otherwise, take the offset from that of the field. Substitute
5448 any PLACEHOLDER_EXPR that we have. */
5450 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5453 /* Return 1 if T is an expression that get_inner_reference handles. */
5456 handled_component_p (tree t)
5458 switch (TREE_CODE (t))
5463 case ARRAY_RANGE_REF:
5464 case VIEW_CONVERT_EXPR:
5474 /* Given an rtx VALUE that may contain additions and multiplications, return
5475 an equivalent value that just refers to a register, memory, or constant.
5476 This is done by generating instructions to perform the arithmetic and
5477 returning a pseudo-register containing the value.
5479 The returned value may be a REG, SUBREG, MEM or constant. */
5482 force_operand (rtx value, rtx target)
5485 /* Use subtarget as the target for operand 0 of a binary operation. */
5486 rtx subtarget = get_subtarget (target);
5487 enum rtx_code code = GET_CODE (value);
5489 /* Check for subreg applied to an expression produced by loop optimizer. */
5491 && !REG_P (SUBREG_REG (value))
5492 && !MEM_P (SUBREG_REG (value)))
5494 value = simplify_gen_subreg (GET_MODE (value),
5495 force_reg (GET_MODE (SUBREG_REG (value)),
5496 force_operand (SUBREG_REG (value),
5498 GET_MODE (SUBREG_REG (value)),
5499 SUBREG_BYTE (value));
5500 code = GET_CODE (value);
5503 /* Check for a PIC address load. */
5504 if ((code == PLUS || code == MINUS)
5505 && XEXP (value, 0) == pic_offset_table_rtx
5506 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5507 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5508 || GET_CODE (XEXP (value, 1)) == CONST))
5511 subtarget = gen_reg_rtx (GET_MODE (value));
5512 emit_move_insn (subtarget, value);
5516 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5519 target = gen_reg_rtx (GET_MODE (value));
5520 convert_move (target, force_operand (XEXP (value, 0), NULL),
5521 code == ZERO_EXTEND);
5525 if (ARITHMETIC_P (value))
5527 op2 = XEXP (value, 1);
5528 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5530 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5533 op2 = negate_rtx (GET_MODE (value), op2);
5536 /* Check for an addition with OP2 a constant integer and our first
5537 operand a PLUS of a virtual register and something else. In that
5538 case, we want to emit the sum of the virtual register and the
5539 constant first and then add the other value. This allows virtual
5540 register instantiation to simply modify the constant rather than
5541 creating another one around this addition. */
5542 if (code == PLUS && GET_CODE (op2) == CONST_INT
5543 && GET_CODE (XEXP (value, 0)) == PLUS
5544 && REG_P (XEXP (XEXP (value, 0), 0))
5545 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5546 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5548 rtx temp = expand_simple_binop (GET_MODE (value), code,
5549 XEXP (XEXP (value, 0), 0), op2,
5550 subtarget, 0, OPTAB_LIB_WIDEN);
5551 return expand_simple_binop (GET_MODE (value), code, temp,
5552 force_operand (XEXP (XEXP (value,
5554 target, 0, OPTAB_LIB_WIDEN);
5557 op1 = force_operand (XEXP (value, 0), subtarget);
5558 op2 = force_operand (op2, NULL_RTX);
5562 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5564 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5565 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5566 target, 1, OPTAB_LIB_WIDEN);
5568 return expand_divmod (0,
5569 FLOAT_MODE_P (GET_MODE (value))
5570 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5571 GET_MODE (value), op1, op2, target, 0);
5574 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5578 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5582 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5586 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5587 target, 0, OPTAB_LIB_WIDEN);
5590 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5591 target, 1, OPTAB_LIB_WIDEN);
5594 if (UNARY_P (value))
5596 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5597 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5600 #ifdef INSN_SCHEDULING
5601 /* On machines that have insn scheduling, we want all memory references to be
5602 explicit, so we need to deal with such paradoxical SUBREGs. */
5603 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5604 && (GET_MODE_SIZE (GET_MODE (value))
5605 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5607 = simplify_gen_subreg (GET_MODE (value),
5608 force_reg (GET_MODE (SUBREG_REG (value)),
5609 force_operand (SUBREG_REG (value),
5611 GET_MODE (SUBREG_REG (value)),
5612 SUBREG_BYTE (value));
5618 /* Subroutine of expand_expr: return nonzero iff there is no way that
5619 EXP can reference X, which is being modified. TOP_P is nonzero if this
5620 call is going to be used to determine whether we need a temporary
5621 for EXP, as opposed to a recursive call to this function.
5623 It is always safe for this routine to return zero since it merely
5624 searches for optimization opportunities. */
5627 safe_from_p (rtx x, tree exp, int top_p)
5633 /* If EXP has varying size, we MUST use a target since we currently
5634 have no way of allocating temporaries of variable size
5635 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5636 So we assume here that something at a higher level has prevented a
5637 clash. This is somewhat bogus, but the best we can do. Only
5638 do this when X is BLKmode and when we are at the top level. */
5639 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5640 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5641 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5642 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5643 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5645 && GET_MODE (x) == BLKmode)
5646 /* If X is in the outgoing argument area, it is always safe. */
5648 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5649 || (GET_CODE (XEXP (x, 0)) == PLUS
5650 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5653 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5654 find the underlying pseudo. */
5655 if (GET_CODE (x) == SUBREG)
5658 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5662 /* Now look at our tree code and possibly recurse. */
5663 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5665 case tcc_declaration:
5666 exp_rtl = DECL_RTL_IF_SET (exp);
5672 case tcc_exceptional:
5673 if (TREE_CODE (exp) == TREE_LIST)
5677 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5679 exp = TREE_CHAIN (exp);
5682 if (TREE_CODE (exp) != TREE_LIST)
5683 return safe_from_p (x, exp, 0);
5686 else if (TREE_CODE (exp) == ERROR_MARK)
5687 return 1; /* An already-visited SAVE_EXPR? */
5692 /* The only case we look at here is the DECL_INITIAL inside a
5693 DECL_EXPR.  */
5694 return (TREE_CODE (exp) != DECL_EXPR
5695 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5696 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5697 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5700 case tcc_comparison:
5701 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5706 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5708 case tcc_expression:
5710 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5711 the expression. If it is set, we conflict iff we are that rtx or
5712 both are in memory. Otherwise, we check all operands of the
5713 expression recursively. */
5715 switch (TREE_CODE (exp))
5718 /* If the operand is static or we are static, we can't conflict.
5719 Likewise if we don't conflict with the operand at all. */
5720 if (staticp (TREE_OPERAND (exp, 0))
5721 || TREE_STATIC (exp)
5722 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5725 /* Otherwise, the only way this can conflict is if we are taking
5726 the address of a DECL whose address is part of X, which is
5727 very rare.  */
5728 exp = TREE_OPERAND (exp, 0);
5731 if (!DECL_RTL_SET_P (exp)
5732 || !MEM_P (DECL_RTL (exp)))
5735 exp_rtl = XEXP (DECL_RTL (exp), 0);
5739 case MISALIGNED_INDIRECT_REF:
5740 case ALIGN_INDIRECT_REF:
5743 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5744 get_alias_set (exp)))
5749 /* Assume that the call will clobber all hard registers and
5750 all of memory.  */
5751 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5756 case WITH_CLEANUP_EXPR:
5757 case CLEANUP_POINT_EXPR:
5758 /* Lowered by gimplify.c. */
5762 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5768 /* If we have an rtx, we do not need to scan our operands. */
5772 nops = TREE_CODE_LENGTH (TREE_CODE (exp));
5773 for (i = 0; i < nops; i++)
5774 if (TREE_OPERAND (exp, i) != 0
5775 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5778 /* If this is a language-specific tree code, it may require
5779 special handling. */
5780 if ((unsigned int) TREE_CODE (exp)
5781 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5782 && !lang_hooks.safe_from_p (x, exp))
5787 /* Should never get a type here. */
5791 /* If we have an rtl, find any enclosed object. Then see if we conflict
5792 with it.  */
5795 if (GET_CODE (exp_rtl) == SUBREG)
5797 exp_rtl = SUBREG_REG (exp_rtl);
5799 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5803 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5804 are memory and they conflict. */
5805 return ! (rtx_equal_p (x, exp_rtl)
5806 || (MEM_P (x) && MEM_P (exp_rtl)
5807 && true_dependence (exp_rtl, VOIDmode, x,
5808 rtx_addr_varies_p)));
5811 /* If we reach here, it is safe. */
5812 return 1;
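/* Illustrative sketch, not part of GCC: how callers use safe_from_p.
   Zero is always a safe, conservative answer; nonzero promises that
   evaluating EXP cannot clobber X, so X may double as a scratch.  This
   mirrors the check expand_operands performs below.  */
#if 0
  if (! safe_from_p (target, TREE_OPERAND (exp, 1), 1))
    target = 0;		/* fall back to a fresh temporary */
#endif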
5816 /* Return the highest power of two that EXP is known to be a multiple of.
5817 This is used in updating alignment of MEMs in array references. */
5819 static unsigned HOST_WIDE_INT
5820 highest_pow2_factor (tree exp)
5822 unsigned HOST_WIDE_INT c0, c1;
5824 switch (TREE_CODE (exp))
5827 /* We can find the lowest bit that's a one. If the low
5828 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5829 We need to handle this case since we can find it in a COND_EXPR,
5830 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5831 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5832 later ICE.  */
5833 if (TREE_CONSTANT_OVERFLOW (exp))
5834 return BIGGEST_ALIGNMENT;
5837 /* Note: tree_low_cst is intentionally not used here,
5838 we don't care about the upper bits. */
5839 c0 = TREE_INT_CST_LOW (exp);
5841 return c0 ? c0 : BIGGEST_ALIGNMENT;
5845 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5846 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5847 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5848 return MIN (c0, c1);
5851 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5852 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5855 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5857 if (integer_pow2p (TREE_OPERAND (exp, 1))
5858 && host_integerp (TREE_OPERAND (exp, 1), 1))
5860 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5861 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5862 return MAX (1, c0 / c1);
5866 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5868 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5871 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5874 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5875 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5876 return MIN (c0, c1);
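/* Illustrative sketch, not part of GCC: a worked example of the
   recursion above.  For the offset expression I * 4 + 8, the MULT_EXPR
   case gives 1 * 4 == 4, the constant 8 contributes 8, and PLUS_EXPR
   takes MIN (4, 8), so the result is 4: a MEM addressed by that
   expression may be assumed 4-byte aligned.  OFFSET_EXPR stands for
   that tree.  */
#if 0
  unsigned HOST_WIDE_INT align = highest_pow2_factor (offset_expr);
#endif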
5885 /* Similar, except that the alignment requirements of TARGET are
5886 taken into account. Assume it is at least as aligned as its
5887 type, unless it is a COMPONENT_REF in which case the layout of
5888 the structure gives the alignment. */
5890 static unsigned HOST_WIDE_INT
5891 highest_pow2_factor_for_target (tree target, tree exp)
5893 unsigned HOST_WIDE_INT target_align, factor;
5895 factor = highest_pow2_factor (exp);
5896 if (TREE_CODE (target) == COMPONENT_REF)
5897 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5899 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5900 return MAX (factor, target_align);
5903 /* Expands variable VAR. */
5906 expand_var (tree var)
5908 if (DECL_EXTERNAL (var))
5911 if (TREE_STATIC (var))
5912 /* If this is an inlined copy of a static local variable,
5913 look up the original decl. */
5914 var = DECL_ORIGIN (var);
5916 if (TREE_STATIC (var)
5917 ? !TREE_ASM_WRITTEN (var)
5918 : !DECL_RTL_SET_P (var))
5920 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5921 /* Should be ignored. */;
5922 else if (lang_hooks.expand_decl (var))
5924 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5926 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5927 rest_of_decl_compilation (var, 0, 0);
5929 /* No expansion needed. */
5930 gcc_assert (TREE_CODE (var) == TYPE_DECL
5931 || TREE_CODE (var) == CONST_DECL
5932 || TREE_CODE (var) == FUNCTION_DECL
5933 || TREE_CODE (var) == LABEL_DECL);
5937 /* Subroutine of expand_expr. Expand the two operands of a binary
5938 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5939 The value may be stored in TARGET if TARGET is nonzero. The
5940 MODIFIER argument is as documented by expand_expr. */
5943 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5944 enum expand_modifier modifier)
5946 if (! safe_from_p (target, exp1, 1))
5948 if (operand_equal_p (exp0, exp1, 0))
5950 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5951 *op1 = copy_rtx (*op0);
5955 /* If we need to preserve evaluation order, copy exp0 into its own
5956 temporary variable so that it can't be clobbered by exp1. */
5957 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5958 exp0 = save_expr (exp0);
5959 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5960 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
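/* Illustrative sketch, not part of GCC: the usual call pattern when
   expanding a binary operator, so that the safe_from_p check and the
   flag_evaluation_order handling above are applied in one place.  */
#if 0
  rtx op0, op1;
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, EXPAND_NORMAL);
#endif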
5965 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
5966 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
5969 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
5970 enum expand_modifier modifier)
5972 rtx result, subtarget;
5974 HOST_WIDE_INT bitsize, bitpos;
5975 int volatilep, unsignedp;
5976 enum machine_mode mode1;
5978 /* If we are taking the address of a constant and are at the top level,
5979 we have to use output_constant_def since we can't call force_const_mem
5981 /* ??? This should be considered a front-end bug. We should not be
5982 generating ADDR_EXPR of something that isn't an LVALUE. The only
5983 exception here is STRING_CST. */
5984 if (TREE_CODE (exp) == CONSTRUCTOR
5985 || CONSTANT_CLASS_P (exp))
5986 return XEXP (output_constant_def (exp, 0), 0);
5988 /* Everything must be something allowed by is_gimple_addressable. */
5989 switch (TREE_CODE (exp))
5992 /* This case will happen via recursion for &a->b. */
5993 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
5996 /* Recurse and make the output_constant_def clause above handle this. */
5997 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6001 /* The real part of the complex number is always first, therefore
6002 the address is the same as the address of the parent object. */
6005 inner = TREE_OPERAND (exp, 0);
6009 /* The imaginary part of the complex number is always second.
6010 The expression is therefore always offset by the size of the
6013 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6014 inner = TREE_OPERAND (exp, 0);
6018 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6019 expand_expr, as that can have various side effects; LABEL_DECLs for
6020 example, may not have their DECL_RTL set yet. Assume language
6021 specific tree nodes can be expanded in some interesting way. */
6023 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6025 result = expand_expr (exp, target, tmode,
6026 modifier == EXPAND_INITIALIZER
6027 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6029 /* If the DECL isn't in memory, then the DECL wasn't properly
6030 marked TREE_ADDRESSABLE, which will be either a front-end
6031 or a tree optimizer bug. */
6032 gcc_assert (GET_CODE (result) == MEM);
6033 result = XEXP (result, 0);
6035 /* ??? Is this needed anymore? */
6036 if (DECL_P (exp) && !TREE_USED (exp))
6038 assemble_external (exp);
6039 TREE_USED (exp) = 1;
6042 if (modifier != EXPAND_INITIALIZER
6043 && modifier != EXPAND_CONST_ADDRESS)
6044 result = force_operand (result, target);
6048 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6049 &mode1, &unsignedp, &volatilep);
6053 /* We must have made progress. */
6054 gcc_assert (inner != exp);
6056 subtarget = offset || bitpos ? NULL_RTX : target;
6057 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6063 if (modifier != EXPAND_NORMAL)
6064 result = force_operand (result, NULL);
6065 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6067 result = convert_memory_address (tmode, result);
6068 tmp = convert_memory_address (tmode, tmp);
6070 if (modifier == EXPAND_SUM)
6071 result = gen_rtx_PLUS (tmode, result, tmp);
6074 subtarget = bitpos ? NULL_RTX : target;
6075 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6076 1, OPTAB_LIB_WIDEN);
6082 /* Someone beforehand should have rejected taking the address
6083 of such an object. */
6084 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6086 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6087 if (modifier < EXPAND_SUM)
6088 result = force_operand (result, target);
6094 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6095 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6098 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6099 enum expand_modifier modifier)
6101 enum machine_mode rmode;
6104 /* Target mode of VOIDmode says "whatever's natural". */
6105 if (tmode == VOIDmode)
6106 tmode = TYPE_MODE (TREE_TYPE (exp));
6108 /* We can get called with some Weird Things if the user does silliness
6109 like "(short) &a". In that case, convert_memory_address won't do
6110 the right thing, so ignore the given target mode. */
6111 if (tmode != Pmode && tmode != ptr_mode)
6114 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6117 /* Despite expand_expr's claims about ignoring TMODE when not
6118 strictly convenient, stuff breaks if we don't honor it. Note
6119 that combined with the above, we only do this for pointer modes. */
6120 rmode = GET_MODE (result);
6121 if (rmode == VOIDmode)
6124 result = convert_memory_address (tmode, result);
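/* Illustrative sketch, not part of GCC: for the "(short) &a" case in the
   comment above, TMODE comes in as HImode, which is neither Pmode nor
   ptr_mode, so the given mode is ignored and the address is computed in
   the pointer mode instead; only the final conversion honors TMODE.
   EXP is assumed to be the ADDR_EXPR.  */
#if 0
  rtx addr = expand_expr_addr_expr (exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
#endif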
6130 /* expand_expr: generate code for computing expression EXP.
6131 An rtx for the computed value is returned. The value is never null.
6132 In the case of a void EXP, const0_rtx is returned.
6134 The value may be stored in TARGET if TARGET is nonzero.
6135 TARGET is just a suggestion; callers must assume that
6136 the rtx returned may not be the same as TARGET.
6138 If TARGET is CONST0_RTX, it means that the value will be ignored.
6140 If TMODE is not VOIDmode, it suggests generating the
6141 result in mode TMODE. But this is done only when convenient.
6142 Otherwise, TMODE is ignored and the value generated in its natural mode.
6143 TMODE is just a suggestion; callers must assume that
6144 the rtx returned may not have mode TMODE.
6146 Note that TARGET may have neither TMODE nor MODE. In that case, it
6147 probably will not be used.
6149 If MODIFIER is EXPAND_SUM then when EXP is an addition
6150 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6151 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6152 products as above, or REG or MEM, or constant.
6153 Ordinarily in such cases we would output mul or add instructions
6154 and then return a pseudo reg containing the sum.
6156 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6157 it also marks a label as absolutely required (it can't be dead).
6158 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6159 This is used for outputting expressions used in initializers.
6161 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6162 with a constant address even if that address is not normally legitimate.
6163 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6165 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6166 a call parameter. Such targets require special care as we haven't yet
6167 marked TARGET so that it's safe from being trashed by libcalls. We
6168 don't want to use TARGET for anything but the final result;
6169 Intermediate values must go elsewhere. Additionally, calls to
6170 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6172 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6173 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6174 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6175 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6176 recursively.  */
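/* Illustrative sketch, not part of GCC: the modifiers above in their
   most common pairings.  EXPAND_NORMAL computes a plain value,
   EXPAND_SUM may hand back a (plus ...) nest useful for address
   arithmetic, and EXPAND_STACK_PARM protects an outgoing-argument
   slot.  EXP and ADDR_EXP are placeholder trees.  */
#if 0
  rtx val  = expand_expr (exp, target, mode, EXPAND_NORMAL);
  rtx addr = expand_expr (addr_exp, NULL_RTX, ptr_mode, EXPAND_SUM);
#endif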
6178 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6179 enum expand_modifier, rtx *);
6182 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6183 enum expand_modifier modifier, rtx *alt_rtl)
6186 rtx ret, last = NULL;
6188 /* Handle ERROR_MARK before anybody tries to access its type. */
6189 if (TREE_CODE (exp) == ERROR_MARK
6190 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6192 ret = CONST0_RTX (tmode);
6193 return ret ? ret : const0_rtx;
6196 if (flag_non_call_exceptions)
6198 rn = lookup_stmt_eh_region (exp);
6199 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6201 last = get_last_insn ();
6204 /* If this is an expression of some kind and it has an associated line
6205 number, then emit the line number before expanding the expression.
6207 We need to save and restore the file and line information so that
6208 errors discovered during expansion are emitted with the right
6209 information. It would be better if the diagnostic routines
6210 used the file/line information embedded in the tree nodes rather
6211 than globals.  */
6212 if (cfun && EXPR_HAS_LOCATION (exp))
6214 location_t saved_location = input_location;
6215 input_location = EXPR_LOCATION (exp);
6216 emit_line_note (input_location);
6218 /* Record where the insns produced belong. */
6219 record_block_change (TREE_BLOCK (exp));
6221 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6223 input_location = saved_location;
6226 else
6227 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6230 /* If using non-call exceptions, mark all insns that may trap.
6231 expand_call() will mark CALL_INSNs before we get to this code,
6232 but it doesn't handle libcalls, and these may trap. */
6236 for (insn = next_real_insn (last); insn;
6237 insn = next_real_insn (insn))
6239 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6240 /* If we want exceptions for non-call insns, any
6241 may_trap_p instruction may throw. */
6242 && GET_CODE (PATTERN (insn)) != CLOBBER
6243 && GET_CODE (PATTERN (insn)) != USE
6244 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6246 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6256 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6257 enum expand_modifier modifier, rtx *alt_rtl)
6260 tree type = TREE_TYPE (exp);
6262 enum machine_mode mode;
6263 enum tree_code code = TREE_CODE (exp);
6265 rtx subtarget, original_target;
6268 bool reduce_bit_field = false;
6269 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6270 ? reduce_to_bit_field_precision ((expr), \
6271 target, \
6272 type) \
6273 : (expr))
6275 mode = TYPE_MODE (type);
6276 unsignedp = TYPE_UNSIGNED (type);
6277 if (lang_hooks.reduce_bit_field_operations
6278 && TREE_CODE (type) == INTEGER_TYPE
6279 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6281 /* An operation in what may be a bit-field type needs the
6282 result to be reduced to the precision of the bit-field type,
6283 which is narrower than that of the type's mode. */
6284 reduce_bit_field = true;
6285 if (modifier == EXPAND_STACK_PARM)
6286 target = 0;
6289 /* Use subtarget as the target for operand 0 of a binary operation. */
6290 subtarget = get_subtarget (target);
6291 original_target = target;
6292 ignore = (target == const0_rtx
6293 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6294 || code == CONVERT_EXPR || code == COND_EXPR
6295 || code == VIEW_CONVERT_EXPR)
6296 && TREE_CODE (type) == VOID_TYPE));
6298 /* If we are going to ignore this result, we need only do something
6299 if there is a side-effect somewhere in the expression. If there
6300 is, short-circuit the most common cases here. Note that we must
6301 not call expand_expr with anything but const0_rtx in case this
6302 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6306 if (! TREE_SIDE_EFFECTS (exp))
6309 /* Ensure we reference a volatile object even if value is ignored, but
6310 don't do this if all we are doing is taking its address. */
6311 if (TREE_THIS_VOLATILE (exp)
6312 && TREE_CODE (exp) != FUNCTION_DECL
6313 && mode != VOIDmode && mode != BLKmode
6314 && modifier != EXPAND_CONST_ADDRESS)
6316 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6318 temp = copy_to_reg (temp);
6322 if (TREE_CODE_CLASS (code) == tcc_unary
6323 || code == COMPONENT_REF || code == INDIRECT_REF)
6324 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6327 else if (TREE_CODE_CLASS (code) == tcc_binary
6328 || TREE_CODE_CLASS (code) == tcc_comparison
6329 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6331 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6332 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6335 else if (code == BIT_FIELD_REF)
6337 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6338 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6339 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6346 /* If we will do cse, generate all results into pseudo registers
6347 since 1) that allows cse to find more things
6348 and 2) otherwise cse could produce an insn the machine
6349 cannot support. An exception is a CONSTRUCTOR into a multi-word
6350 MEM: that's much more likely to be most efficient into the MEM.
6351 Another is a CALL_EXPR which must return in memory. */
6353 if (! cse_not_expected && mode != BLKmode && target
6354 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6355 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6356 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6363 tree function = decl_function_context (exp);
6365 temp = label_rtx (exp);
6366 temp = gen_rtx_LABEL_REF (Pmode, temp);
6368 if (function != current_function_decl
6369 && function != 0)
6370 LABEL_REF_NONLOCAL_P (temp) = 1;
6372 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6377 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6382 /* If a static var's type was incomplete when the decl was written,
6383 but the type is complete now, lay out the decl now. */
6384 if (DECL_SIZE (exp) == 0
6385 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6386 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6387 layout_decl (exp, 0);
6389 /* ... fall through ... */
6393 gcc_assert (DECL_RTL (exp));
6395 /* Ensure the variable is marked as used even if it doesn't go through
6396 a parser. If it hasn't been used yet, write out an external
6397 definition. */
6398 if (! TREE_USED (exp))
6400 assemble_external (exp);
6401 TREE_USED (exp) = 1;
6404 /* Show we haven't gotten RTL for this yet. */
6407 /* Variables inherited from containing functions should have
6408 been lowered by this point. */
6409 context = decl_function_context (exp);
6410 gcc_assert (!context
6411 || context == current_function_decl
6412 || TREE_STATIC (exp)
6413 /* ??? C++ creates functions that are not TREE_STATIC. */
6414 || TREE_CODE (exp) == FUNCTION_DECL);
6416 /* This is the case of an array whose size is to be determined
6417 from its initializer, while the initializer is still being parsed.
6420 if (MEM_P (DECL_RTL (exp))
6421 && REG_P (XEXP (DECL_RTL (exp), 0)))
6422 temp = validize_mem (DECL_RTL (exp));
6424 /* If DECL_RTL is memory, we are in the normal case and either
6425 the address is not valid or it is not a register and -fforce-addr
6426 is specified, get the address into a register. */
6428 else if (MEM_P (DECL_RTL (exp))
6429 && modifier != EXPAND_CONST_ADDRESS
6430 && modifier != EXPAND_SUM
6431 && modifier != EXPAND_INITIALIZER
6432 && (! memory_address_p (DECL_MODE (exp),
6433 XEXP (DECL_RTL (exp), 0))
6435 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6438 *alt_rtl = DECL_RTL (exp);
6439 temp = replace_equiv_address (DECL_RTL (exp),
6440 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6443 /* If we got something, return it. But first, set the alignment
6444 if the address is a register. */
6447 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6448 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6453 /* If the mode of DECL_RTL does not match that of the decl, it
6454 must be a promoted value. We return a SUBREG of the wanted mode,
6455 but mark it so that we know that it was already extended. */
6457 if (REG_P (DECL_RTL (exp))
6458 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6460 enum machine_mode pmode;
6462 /* Get the signedness used for this variable. Ensure we get the
6463 same mode we got when the variable was declared. */
6464 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6465 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6466 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6468 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6469 SUBREG_PROMOTED_VAR_P (temp) = 1;
6470 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6474 return DECL_RTL (exp);
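/* Illustrative sketch, not part of this file (kept out of compilation):
   what the promoted-SUBREG marking buys us. On a target that promotes
   QImode locals to word mode, C below already lives sign-extended in a
   wide register, and SUBREG_PROMOTED_VAR_P lets the widening cast
   generate no extension insn at all. */
#if 0
static int
example_promoted_use (signed char c)
{
  return (int) c;	/* no extend insn when c's register is promoted */
}
#endif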
6477 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6478 TREE_INT_CST_HIGH (exp), mode);
6480 /* ??? If overflow is set, fold will have done an incomplete job,
6481 which can result in (plus xx (const_int 0)), which can get
6482 simplified by validate_replace_rtx during virtual register
6483 instantiation, which can result in unrecognizable insns.
6484 Avoid this by forcing all overflows into registers. */
6485 if (TREE_CONSTANT_OVERFLOW (exp)
6486 && modifier != EXPAND_INITIALIZER)
6487 temp = force_reg (mode, temp);
6492 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6493 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6494 return const_vector_from_tree (exp);
6496 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6497 TREE_VECTOR_CST_ELTS (exp)),
6498 ignore ? const0_rtx : target, tmode, modifier);
6501 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6504 /* If optimized, generate immediate CONST_DOUBLE
6505 which will be turned into memory by reload if necessary.
6507 We used to force a register so that loop.c could see it. But
6508 this does not allow gen_* patterns to perform optimizations with
6509 the constants. It also produces two insns in cases like "x = 1.0;".
6510 On most machines, floating-point constants are not permitted in
6511 many insns, so we'd end up copying it to a register in any case.
6513 Now, we do the copying in expand_binop, if appropriate. */
6514 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6515 TYPE_MODE (TREE_TYPE (exp)));
6518 /* Handle evaluating a complex constant in a CONCAT target. */
6519 if (original_target && GET_CODE (original_target) == CONCAT)
6521 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6524 rtarg = XEXP (original_target, 0);
6525 itarg = XEXP (original_target, 1);
6527 /* Move the real and imaginary parts separately. */
6528 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6529 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6532 emit_move_insn (rtarg, op0);
6534 emit_move_insn (itarg, op1);
6536 return original_target;
6539 /* ... fall through ... */
6542 temp = output_constant_def (exp, 1);
6544 /* temp contains a constant address.
6545 On RISC machines where a constant address isn't valid,
6546 make some insns to get that address into a register. */
6547 if (modifier != EXPAND_CONST_ADDRESS
6548 && modifier != EXPAND_INITIALIZER
6549 && modifier != EXPAND_SUM
6550 && (! memory_address_p (mode, XEXP (temp, 0))
6551 || flag_force_addr))
6552 return replace_equiv_address (temp,
6553 copy_rtx (XEXP (temp, 0)));
6558 tree val = TREE_OPERAND (exp, 0);
6559 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6561 if (!SAVE_EXPR_RESOLVED_P (exp))
6563 /* We can indeed still hit this case, typically via builtin
6564 expanders calling save_expr immediately before expanding
6565 something. Assume this means that we only have to deal
6566 with non-BLKmode values. */
6567 gcc_assert (GET_MODE (ret) != BLKmode);
6569 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6570 DECL_ARTIFICIAL (val) = 1;
6571 DECL_IGNORED_P (val) = 1;
6572 TREE_OPERAND (exp, 0) = val;
6573 SAVE_EXPR_RESOLVED_P (exp) = 1;
6575 if (!CONSTANT_P (ret))
6576 ret = copy_to_reg (ret);
6577 SET_DECL_RTL (val, ret);
6584 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6585 expand_goto (TREE_OPERAND (exp, 0));
6587 expand_computed_goto (TREE_OPERAND (exp, 0));
6591 /* If we don't need the result, just ensure we evaluate any
6592 subexpressions. */
6597 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6598 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6603 /* All elts simple constants => refer to a constant in memory. But
6604 if this is a non-BLKmode mode, let it store a field at a time
6605 since that should make a CONST_INT or CONST_DOUBLE when we
6606 fold. Likewise, if we have a target we can use, it is best to
6607 store directly into the target unless the type is large enough
6608 that memcpy will be used. If we are making an initializer and
6609 all operands are constant, put it in memory as well.
6611 FIXME: Avoid trying to fill vector constructors piece-meal.
6612 Output them with output_constant_def below unless we're sure
6613 they're zeros. This should go away when vector initializers
6614 are treated like VECTOR_CST instead of arrays.
6616 else if ((TREE_STATIC (exp)
6617 && ((mode == BLKmode
6618 && ! (target != 0 && safe_from_p (target, exp, 1)))
6619 || TREE_ADDRESSABLE (exp)
6620 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6621 && (! MOVE_BY_PIECES_P
6622 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6624 && ! mostly_zeros_p (exp))))
6625 || ((modifier == EXPAND_INITIALIZER
6626 || modifier == EXPAND_CONST_ADDRESS)
6627 && TREE_CONSTANT (exp)))
6629 rtx constructor = output_constant_def (exp, 1);
6631 if (modifier != EXPAND_CONST_ADDRESS
6632 && modifier != EXPAND_INITIALIZER
6633 && modifier != EXPAND_SUM)
6634 constructor = validize_mem (constructor);
6640 /* Handle calls that pass values in multiple non-contiguous
6641 locations. The Irix 6 ABI has examples of this. */
6642 if (target == 0 || ! safe_from_p (target, exp, 1)
6643 || GET_CODE (target) == PARALLEL
6644 || modifier == EXPAND_STACK_PARM)
6645 target
6646 = assign_temp (build_qualified_type (type,
6647 (TYPE_QUALS (type)
6648 | (TREE_READONLY (exp)
6649 * TYPE_QUAL_CONST))),
6650 0, TREE_ADDRESSABLE (exp), 1);
6652 store_constructor (exp, target, 0, int_expr_size (exp));
6656 case MISALIGNED_INDIRECT_REF:
6657 case ALIGN_INDIRECT_REF:
6660 tree exp1 = TREE_OPERAND (exp, 0);
6663 if (code == MISALIGNED_INDIRECT_REF
6664 && !targetm.vectorize.misaligned_mem_ok (mode))
6667 if (modifier != EXPAND_WRITE)
6671 t = fold_read_from_constant_string (exp);
6672 if (t)
6673 return expand_expr (t, target, tmode, modifier);
6676 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6677 op0 = memory_address (mode, op0);
6679 if (code == ALIGN_INDIRECT_REF)
6681 int align = TYPE_ALIGN_UNIT (type);
6682 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6683 op0 = memory_address (mode, op0);
6686 temp = gen_rtx_MEM (mode, op0);
6688 orig = REF_ORIGINAL (exp);
6691 set_mem_attributes (temp, orig, 0);
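/* Illustrative sketch, not part of this file (kept out of compilation):
   the gen_rtx_AND above is the usual align-down idiom; for a power of
   two ALIGN, "addr & -align" equals "addr & ~(align - 1)". */
#if 0
static void *
example_align_down (void *p, unsigned long align)
{
  return (void *) ((unsigned long) p & - align);
}
#endif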
6699 tree array = TREE_OPERAND (exp, 0);
6700 tree index = TREE_OPERAND (exp, 1);
6702 /* Fold an expression like: "foo"[2].
6703 This is not done in fold so it won't happen inside &.
6704 Don't fold if this is for wide characters since it's too
6705 difficult to do correctly and this is a very rare case. */
6707 if (modifier != EXPAND_CONST_ADDRESS
6708 && modifier != EXPAND_INITIALIZER
6709 && modifier != EXPAND_MEMORY)
6711 tree t = fold_read_from_constant_string (exp);
6713 if (t)
6714 return expand_expr (t, target, tmode, modifier);
6717 /* If this is a constant index into a constant array,
6718 just get the value from the array. Handle both the cases when
6719 we have an explicit constructor and when our operand is a variable
6720 that was declared const. */
6722 if (modifier != EXPAND_CONST_ADDRESS
6723 && modifier != EXPAND_INITIALIZER
6724 && modifier != EXPAND_MEMORY
6725 && TREE_CODE (array) == CONSTRUCTOR
6726 && ! TREE_SIDE_EFFECTS (array)
6727 && TREE_CODE (index) == INTEGER_CST)
6731 for (elem = CONSTRUCTOR_ELTS (array);
6732 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6733 elem = TREE_CHAIN (elem))
6736 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6737 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6741 else if (optimize >= 1
6742 && modifier != EXPAND_CONST_ADDRESS
6743 && modifier != EXPAND_INITIALIZER
6744 && modifier != EXPAND_MEMORY
6745 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6746 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6747 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6748 && targetm.binds_local_p (array))
6750 if (TREE_CODE (index) == INTEGER_CST)
6752 tree init = DECL_INITIAL (array);
6754 if (TREE_CODE (init) == CONSTRUCTOR)
6758 for (elem = CONSTRUCTOR_ELTS (init);
6760 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6761 elem = TREE_CHAIN (elem))
6764 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6765 return expand_expr (fold (TREE_VALUE (elem)), target,
6768 else if (TREE_CODE (init) == STRING_CST
6769 && 0 > compare_tree_int (index,
6770 TREE_STRING_LENGTH (init)))
6772 tree type = TREE_TYPE (TREE_TYPE (init));
6773 enum machine_mode mode = TYPE_MODE (type);
6775 if (GET_MODE_CLASS (mode) == MODE_INT
6776 && GET_MODE_SIZE (mode) == 1)
6777 return gen_int_mode (TREE_STRING_POINTER (init)
6778 [TREE_INT_CST_LOW (index)], mode);
6783 goto normal_inner_ref;
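/* Illustrative sketch, not part of this file (kept out of compilation):
   the two folds above in source terms. Both subscripts below are
   constant indexes into constant arrays, so each reduces to the
   character constant 'b' with no runtime load. */
#if 0
static int
example_fold_const_index (void)
{
  static const char arr[] = "abc";	/* read-only VAR_DECL path */
  return "abc"[1] == arr[1];		/* folds to 1 at compile time */
}
#endif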
6786 /* If the operand is a CONSTRUCTOR, we can just extract the
6787 appropriate field if it is present. */
6788 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6792 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6793 elt = TREE_CHAIN (elt))
6794 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6795 /* We can normally use the value of the field in the
6796 CONSTRUCTOR. However, if this is a bitfield in
6797 an integral mode that we can fit in a HOST_WIDE_INT,
6798 we must mask only the number of bits in the bitfield,
6799 since this is done implicitly by the constructor. If
6800 the bitfield does not meet either of those conditions,
6801 we can't do this optimization. */
6802 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6803 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6805 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6806 <= HOST_BITS_PER_WIDE_INT))))
6808 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6809 && modifier == EXPAND_STACK_PARM)
6811 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6812 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6814 HOST_WIDE_INT bitsize
6815 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6816 enum machine_mode imode
6817 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6819 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6821 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6822 op0 = expand_and (imode, op0, op1, target);
6827 = build_int_cst (NULL_TREE,
6828 GET_MODE_BITSIZE (imode) - bitsize);
6830 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6832 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6840 goto normal_inner_ref;
6843 case ARRAY_RANGE_REF:
6846 enum machine_mode mode1;
6847 HOST_WIDE_INT bitsize, bitpos;
6850 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6851 &mode1, &unsignedp, &volatilep);
6854 /* If we got back the original object, something is wrong. Perhaps
6855 we are evaluating an expression too early. In any event, don't
6856 infinitely recurse. */
6857 gcc_assert (tem != exp);
6859 /* If TEM's type is a union of variable size, pass TARGET to the inner
6860 computation, since it will need a temporary and TARGET is known
6861 to suffice. This occurs in unchecked conversion in Ada. */
6865 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6866 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6868 && modifier != EXPAND_STACK_PARM
6869 ? target : NULL_RTX),
6871 (modifier == EXPAND_INITIALIZER
6872 || modifier == EXPAND_CONST_ADDRESS
6873 || modifier == EXPAND_STACK_PARM)
6874 ? modifier : EXPAND_NORMAL);
6876 /* If this is a constant, put it into a register if it is a
6877 legitimate constant and OFFSET is 0 and memory if it isn't. */
6878 if (CONSTANT_P (op0))
6880 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6881 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6883 op0 = force_reg (mode, op0);
6885 op0 = validize_mem (force_const_mem (mode, op0));
6888 /* Otherwise, if this object not in memory and we either have an
6889 offset or a BLKmode result, put it there. This case can't occur in
6890 C, but can in Ada if we have unchecked conversion of an expression
6891 from a scalar type to an array or record type or for an
6892 ARRAY_RANGE_REF whose type is BLKmode. */
6893 else if (!MEM_P (op0)
6895 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6897 tree nt = build_qualified_type (TREE_TYPE (tem),
6898 (TYPE_QUALS (TREE_TYPE (tem))
6899 | TYPE_QUAL_CONST));
6900 rtx memloc = assign_temp (nt, 1, 1, 1);
6902 emit_move_insn (memloc, op0);
6908 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6911 gcc_assert (MEM_P (op0));
6913 #ifdef POINTERS_EXTEND_UNSIGNED
6914 if (GET_MODE (offset_rtx) != Pmode)
6915 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6917 if (GET_MODE (offset_rtx) != ptr_mode)
6918 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6921 if (GET_MODE (op0) == BLKmode
6922 /* A constant address in OP0 can have VOIDmode, we must
6923 not try to call force_reg in that case. */
6924 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6926 && (bitpos % bitsize) == 0
6927 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6928 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6930 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6934 op0 = offset_address (op0, offset_rtx,
6935 highest_pow2_factor (offset));
6938 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6939 record its alignment as BIGGEST_ALIGNMENT. */
6940 if (MEM_P (op0) && bitpos == 0 && offset != 0
6941 && is_aligning_offset (offset, tem))
6942 set_mem_align (op0, BIGGEST_ALIGNMENT);
6944 /* Don't forget about volatility even if this is a bitfield. */
6945 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6947 if (op0 == orig_op0)
6948 op0 = copy_rtx (op0);
6950 MEM_VOLATILE_P (op0) = 1;
6953 /* The following code doesn't handle CONCAT.
6954 Assume only bitpos == 0 can be used for CONCAT, due to
6955 one-element arrays having the same mode as their element. */
6956 if (GET_CODE (op0) == CONCAT)
6958 gcc_assert (bitpos == 0
6959 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
6963 /* In cases where an aligned union has an unaligned object
6964 as a field, we might be extracting a BLKmode value from
6965 an integer-mode (e.g., SImode) object. Handle this case
6966 by doing the extract into an object as wide as the field
6967 (which we know to be the width of a basic mode), then
6968 storing into memory, and changing the mode to BLKmode. */
6969 if (mode1 == VOIDmode
6970 || REG_P (op0) || GET_CODE (op0) == SUBREG
6971 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6972 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6973 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6974 && modifier != EXPAND_CONST_ADDRESS
6975 && modifier != EXPAND_INITIALIZER)
6976 /* If the field isn't aligned enough to fetch as a memref,
6977 fetch it as a bit field. */
6978 || (mode1 != BLKmode
6979 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6980 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6982 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6983 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6984 && ((modifier == EXPAND_CONST_ADDRESS
6985 || modifier == EXPAND_INITIALIZER)
6987 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6988 || (bitpos % BITS_PER_UNIT != 0)))
6989 /* If the type and the field are a constant size and the
6990 size of the type isn't the same size as the bitfield,
6991 we must use bitfield operations. */
6993 && TYPE_SIZE (TREE_TYPE (exp))
6994 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6995 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6998 enum machine_mode ext_mode = mode;
7000 if (ext_mode == BLKmode
7001 && ! (target != 0 && MEM_P (op0)
7003 && bitpos % BITS_PER_UNIT == 0))
7004 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7006 if (ext_mode == BLKmode)
7009 target = assign_temp (type, 0, 1, 1);
7014 /* In this case, BITPOS must start at a byte boundary and
7015 TARGET, if specified, must be a MEM. */
7016 gcc_assert (MEM_P (op0)
7017 && (!target || MEM_P (target))
7018 && !(bitpos % BITS_PER_UNIT));
7020 emit_block_move (target,
7021 adjust_address (op0, VOIDmode,
7022 bitpos / BITS_PER_UNIT),
7023 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7025 (modifier == EXPAND_STACK_PARM
7026 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7031 op0 = validize_mem (op0);
7033 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7034 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7036 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7037 (modifier == EXPAND_STACK_PARM
7038 ? NULL_RTX : target),
7039 ext_mode, ext_mode);
7041 /* If the result is a record type and BITSIZE is narrower than
7042 the mode of OP0, an integral mode, and this is a big endian
7043 machine, we must put the field into the high-order bits. */
7044 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7045 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7046 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7047 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7048 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7052 /* If the result type is BLKmode, store the data into a temporary
7053 of the appropriate type, but with the mode corresponding to the
7054 mode for the data we have (op0's mode). It's tempting to make
7055 this a constant type, since we know it's only being stored once,
7056 but that can cause problems if we are taking the address of this
7057 COMPONENT_REF because the MEM of any reference via that address
7058 will have flags corresponding to the type, which will not
7059 necessarily be constant. */
7060 if (mode == BLKmode)
7063 = assign_stack_temp_for_type
7064 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7066 emit_move_insn (new, op0);
7067 op0 = copy_rtx (new);
7068 PUT_MODE (op0, BLKmode);
7069 set_mem_attributes (op0, exp, 1);
7075 /* If the result is BLKmode, use that to access the object
7076 now as well. */
7077 if (mode == BLKmode)
7078 mode1 = BLKmode;
7080 /* Get a reference to just this component. */
7081 if (modifier == EXPAND_CONST_ADDRESS
7082 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7083 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7085 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7087 if (op0 == orig_op0)
7088 op0 = copy_rtx (op0);
7090 set_mem_attributes (op0, exp, 0);
7091 if (REG_P (XEXP (op0, 0)))
7092 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7094 MEM_VOLATILE_P (op0) |= volatilep;
7095 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7096 || modifier == EXPAND_CONST_ADDRESS
7097 || modifier == EXPAND_INITIALIZER)
7099 else if (target == 0)
7100 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7102 convert_move (target, op0, unsignedp);
7107 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7110 /* Check for a built-in function. */
7111 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7112 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7114 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7116 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7117 == BUILT_IN_FRONTEND)
7118 return lang_hooks.expand_expr (exp, original_target,
7119 tmode, modifier, alt_rtl);
7122 return expand_builtin (exp, target, subtarget, tmode, ignore);
7125 return expand_call (exp, target, ignore);
7127 case NON_LVALUE_EXPR:
7130 if (TREE_OPERAND (exp, 0) == error_mark_node)
7131 return const0_rtx;
7133 if (TREE_CODE (type) == UNION_TYPE)
7135 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7137 /* If both input and output are BLKmode, this conversion isn't doing
7138 anything except possibly changing memory attributes. */
7139 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7141 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7144 result = copy_rtx (result);
7145 set_mem_attributes (result, exp, 0);
7151 if (TYPE_MODE (type) != BLKmode)
7152 target = gen_reg_rtx (TYPE_MODE (type));
7154 target = assign_temp (type, 0, 1, 1);
7158 /* Store data into beginning of memory target. */
7159 store_expr (TREE_OPERAND (exp, 0),
7160 adjust_address (target, TYPE_MODE (valtype), 0),
7161 modifier == EXPAND_STACK_PARM);
7165 gcc_assert (REG_P (target));
7167 /* Store this field into a union of the proper type. */
7168 store_field (target,
7169 MIN ((int_size_in_bytes (TREE_TYPE
7170 (TREE_OPERAND (exp, 0)))
7172 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7173 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7177 /* Return the entire union. */
7181 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7183 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7186 /* If the signedness of the conversion differs and OP0 is
7187 a promoted SUBREG, clear that indication since we now
7188 have to do the proper extension. */
7189 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7190 && GET_CODE (op0) == SUBREG)
7191 SUBREG_PROMOTED_VAR_P (op0) = 0;
7193 return REDUCE_BIT_FIELD (op0);
7196 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7197 if (GET_MODE (op0) == mode)
7200 /* If OP0 is a constant, just convert it into the proper mode. */
7201 else if (CONSTANT_P (op0))
7203 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7204 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7206 if (modifier == EXPAND_INITIALIZER)
7207 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7208 subreg_lowpart_offset (mode,
7211 op0 = convert_modes (mode, inner_mode, op0,
7212 TYPE_UNSIGNED (inner_type));
7215 else if (modifier == EXPAND_INITIALIZER)
7216 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7218 else if (target == 0)
7219 op0 = convert_to_mode (mode, op0,
7220 TYPE_UNSIGNED (TREE_TYPE
7221 (TREE_OPERAND (exp, 0))));
7224 convert_move (target, op0,
7225 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7229 return REDUCE_BIT_FIELD (op0);
7231 case VIEW_CONVERT_EXPR:
7232 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7234 /* If the input and output modes are both the same, we are done.
7235 Otherwise, if neither mode is BLKmode and both are integral and within
7236 a word, we can use gen_lowpart. If neither is true, make sure the
7237 operand is in memory and convert the MEM to the new mode. */
7238 if (TYPE_MODE (type) == GET_MODE (op0))
7240 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7241 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7242 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7243 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7244 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7245 op0 = gen_lowpart (TYPE_MODE (type), op0);
7246 else if (!MEM_P (op0))
7248 /* If the operand is not a MEM, force it into memory. Since we
7249 are going to be changing the mode of the MEM, don't call
7250 force_const_mem for constants because we don't allow pool
7251 constants to change mode. */
7252 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7254 gcc_assert (!TREE_ADDRESSABLE (exp));
7256 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7258 = assign_stack_temp_for_type
7259 (TYPE_MODE (inner_type),
7260 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7262 emit_move_insn (target, op0);
7266 /* At this point, OP0 is in the correct mode. If the output type is such
7267 that the operand is known to be aligned, indicate that it is.
7268 Otherwise, we need only be concerned about alignment for non-BLKmode
7269 results. */
7272 op0 = copy_rtx (op0);
7274 if (TYPE_ALIGN_OK (type))
7275 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7276 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7277 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7279 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7280 HOST_WIDE_INT temp_size
7281 = MAX (int_size_in_bytes (inner_type),
7282 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7283 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7284 temp_size, 0, type);
7285 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7287 gcc_assert (!TREE_ADDRESSABLE (exp));
7289 if (GET_MODE (op0) == BLKmode)
7290 emit_block_move (new_with_op0_mode, op0,
7291 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7292 (modifier == EXPAND_STACK_PARM
7293 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7295 emit_move_insn (new_with_op0_mode, op0);
7300 op0 = adjust_address (op0, TYPE_MODE (type), 0);
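/* Illustrative sketch, not part of this file (kept out of compilation):
   when gen_lowpart does not apply, the code above stores to a stack
   temporary and re-reads it in the new mode; at the source level the
   effect is that of a union pun. */
#if 0
static float
example_view_convert (int i)
{
  union { int i; float f; } u;
  u.i = i;	/* store in the inner type's mode */
  return u.f;	/* re-read the same bits in the outer type's mode */
}
#endif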
7306 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7307 something else, make sure we add the register to the constant and
7308 then to the other thing. This case can occur during strength
7309 reduction and doing it this way will produce better code if the
7310 frame pointer or argument pointer is eliminated.
7312 fold-const.c will ensure that the constant is always in the inner
7313 PLUS_EXPR, so the only case we need to do anything about is if
7314 sp, ap, or fp is our second argument, in which case we must swap
7315 the innermost first argument and our second argument. */
7317 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7318 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7319 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7320 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7321 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7322 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7324 tree t = TREE_OPERAND (exp, 1);
7326 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7327 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7330 /* If the result is to be ptr_mode and we are adding an integer to
7331 something, we might be forming a constant. So try to use
7332 plus_constant. If it produces a sum and we can't accept it,
7333 use force_operand. This allows P = &ARR[const] to generate
7334 efficient code on machines where a SYMBOL_REF is not a valid
7335 address.
7337 If this is an EXPAND_SUM call, always return the sum. */
7338 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7339 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7341 if (modifier == EXPAND_STACK_PARM)
7342 target = 0;
7343 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7344 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7345 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7349 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7351 /* Use immed_double_const to ensure that the constant is
7352 truncated according to the mode of OP1, then sign extended
7353 to a HOST_WIDE_INT. Using the constant directly can result
7354 in non-canonical RTL in a 64x32 cross compile. */
7356 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7358 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7359 op1 = plus_constant (op1, INTVAL (constant_part));
7360 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7361 op1 = force_operand (op1, target);
7362 return REDUCE_BIT_FIELD (op1);
7365 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7366 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7367 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7371 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7372 (modifier == EXPAND_INITIALIZER
7373 ? EXPAND_INITIALIZER : EXPAND_SUM));
7374 if (! CONSTANT_P (op0))
7376 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7377 VOIDmode, modifier);
7378 /* Return a PLUS if modifier says it's OK. */
7379 if (modifier == EXPAND_SUM
7380 || modifier == EXPAND_INITIALIZER)
7381 return simplify_gen_binary (PLUS, mode, op0, op1);
7384 /* Use immed_double_const to ensure that the constant is
7385 truncated according to the mode of OP1, then sign extended
7386 to a HOST_WIDE_INT. Using the constant directly can result
7387 in non-canonical RTL in a 64x32 cross compile. */
7389 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7391 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7392 op0 = plus_constant (op0, INTVAL (constant_part));
7393 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7394 op0 = force_operand (op0, target);
7395 return REDUCE_BIT_FIELD (op0);
7399 /* No sense saving up arithmetic to be done
7400 if it's all in the wrong mode to form part of an address.
7401 And force_operand won't know whether to sign-extend or
7402 zero-extend. */
7403 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7404 || mode != ptr_mode)
7406 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7407 subtarget, &op0, &op1, 0);
7408 if (op0 == const0_rtx)
7409 return op1;
7410 if (op1 == const0_rtx)
7411 return op0;
7415 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7416 subtarget, &op0, &op1, modifier);
7417 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
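/* Illustrative sketch, not part of this file (kept out of compilation):
   forming &ARR[const] as one address, as described above. SYM and OFF
   are hypothetical names; a word-sized access is assumed for the
   validity check. */
#if 0
static rtx
example_symbol_plus_offset (rtx sym, HOST_WIDE_INT off, rtx target)
{
  rtx sum = plus_constant (sym, off);	/* folds into a CONST if possible */
  if (! memory_address_p (word_mode, sum))
    sum = force_operand (sum, target);	/* emit insns to compute it */
  return sum;
}
#endif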
7420 /* For initializers, we are allowed to return a MINUS of two
7421 symbolic constants. Here we handle all cases when both operands
7423 /* Handle difference of two symbolic constants,
7424 for the sake of an initializer. */
7425 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7426 && really_constant_p (TREE_OPERAND (exp, 0))
7427 && really_constant_p (TREE_OPERAND (exp, 1)))
7429 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7430 NULL_RTX, &op0, &op1, modifier);
7432 /* If the last operand is a CONST_INT, use plus_constant of
7433 the negated constant. Else make the MINUS. */
7434 if (GET_CODE (op1) == CONST_INT)
7435 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7437 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7440 /* No sense saving up arithmetic to be done
7441 if it's all in the wrong mode to form part of an address.
7442 And force_operand won't know whether to sign-extend or
7443 zero-extend. */
7444 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7445 || mode != ptr_mode)
7448 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7449 subtarget, &op0, &op1, modifier);
7451 /* Convert A - const to A + (-const). */
7452 if (GET_CODE (op1) == CONST_INT)
7454 op1 = negate_rtx (mode, op1);
7455 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7461 /* If first operand is constant, swap them.
7462 Thus the following special case checks need only
7463 check the second operand. */
7464 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7466 tree t1 = TREE_OPERAND (exp, 0);
7467 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7468 TREE_OPERAND (exp, 1) = t1;
7471 /* Attempt to return something suitable for generating an
7472 indexed address, for machines that support that. */
7474 if (modifier == EXPAND_SUM && mode == ptr_mode
7475 && host_integerp (TREE_OPERAND (exp, 1), 0))
7477 tree exp1 = TREE_OPERAND (exp, 1);
7479 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7483 op0 = force_operand (op0, NULL_RTX);
7485 op0 = copy_to_mode_reg (mode, op0);
7487 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7488 gen_int_mode (tree_low_cst (exp1, 0),
7489 TYPE_MODE (TREE_TYPE (exp1)))));
7492 if (modifier == EXPAND_STACK_PARM)
7493 target = 0;
7495 /* Check for multiplying things that have been extended
7496 from a narrower type. If this machine supports multiplying
7497 in that narrower type with a result in the desired type,
7498 do it that way, and avoid the explicit type-conversion. */
7499 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7500 && TREE_CODE (type) == INTEGER_TYPE
7501 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7502 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7503 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7504 && int_fits_type_p (TREE_OPERAND (exp, 1),
7505 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7506 /* Don't use a widening multiply if a shift will do. */
7507 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7508 > HOST_BITS_PER_WIDE_INT)
7509 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7511 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7512 && (TYPE_PRECISION (TREE_TYPE
7513 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7514 == TYPE_PRECISION (TREE_TYPE
7516 (TREE_OPERAND (exp, 0), 0))))
7517 /* If both operands are extended, they must either both
7518 be zero-extended or both be sign-extended. */
7519 && (TYPE_UNSIGNED (TREE_TYPE
7520 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7521 == TYPE_UNSIGNED (TREE_TYPE
7523 (TREE_OPERAND (exp, 0), 0)))))))
7525 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7526 enum machine_mode innermode = TYPE_MODE (op0type);
7527 bool zextend_p = TYPE_UNSIGNED (op0type);
7528 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7529 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7531 if (mode == GET_MODE_WIDER_MODE (innermode))
7533 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7535 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7536 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7537 TREE_OPERAND (exp, 1),
7538 NULL_RTX, &op0, &op1, 0);
7540 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7541 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7542 NULL_RTX, &op0, &op1, 0);
7545 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7546 && innermode == word_mode)
7549 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7550 NULL_RTX, VOIDmode, 0);
7551 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7552 op1 = convert_modes (innermode, mode,
7553 expand_expr (TREE_OPERAND (exp, 1),
7554 NULL_RTX, VOIDmode, 0),
7557 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7558 NULL_RTX, VOIDmode, 0);
7559 temp = expand_binop (mode, other_optab, op0, op1, target,
7560 unsignedp, OPTAB_LIB_WIDEN);
7561 hipart = gen_highpart (innermode, temp);
7562 htem = expand_mult_highpart_adjust (innermode, hipart,
7566 emit_move_insn (hipart, htem);
7567 return REDUCE_BIT_FIELD (temp);
7571 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7572 subtarget, &op0, &op1, 0);
7573 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
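/* Illustrative sketch, not part of this file (kept out of compilation):
   the source shape the widening-multiply check above recognizes. Both
   operands widen from the same narrower type with the same signedness,
   so a target with a mulhisi3-style pattern needs no extension insns. */
#if 0
static int
example_widening_mult (short a, short b)
{
  return (int) a * (int) b;
}
#endif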
7575 case TRUNC_DIV_EXPR:
7576 case FLOOR_DIV_EXPR:
7578 case ROUND_DIV_EXPR:
7579 case EXACT_DIV_EXPR:
7580 if (modifier == EXPAND_STACK_PARM)
7581 target = 0;
7582 /* Possible optimization: compute the dividend with EXPAND_SUM
7583 then if the divisor is constant can optimize the case
7584 where some terms of the dividend have coeffs divisible by it. */
7585 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7586 subtarget, &op0, &op1, 0);
7587 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7590 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving
7591 an expensive divide. If not, combine will rebuild the original
7592 computation. */
7593 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7594 && TREE_CODE (type) == REAL_TYPE
7595 && !real_onep (TREE_OPERAND (exp, 0)))
7596 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7597 build2 (RDIV_EXPR, type,
7598 build_real (type, dconst1),
7599 TREE_OPERAND (exp, 1))),
7600 target, tmode, modifier);
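/* Illustrative sketch, not part of this file (kept out of compilation):
   the reciprocal rewrite above at the source level. It is only done
   under -funsafe-math-optimizations because it can change rounding;
   the win is that one division feeds many multiplies. */
#if 0
static void
example_reciprocal_div (double *out, const double *in, int n, double b)
{
  double recip = 1.0 / b;	/* single divide, candidate for CSE */
  int i;

  for (i = 0; i < n; i++)
    out[i] = in[i] * recip;
}
#endif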
7604 case TRUNC_MOD_EXPR:
7605 case FLOOR_MOD_EXPR:
7607 case ROUND_MOD_EXPR:
7608 if (modifier == EXPAND_STACK_PARM)
7609 target = 0;
7610 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7611 subtarget, &op0, &op1, 0);
7612 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7614 case FIX_ROUND_EXPR:
7615 case FIX_FLOOR_EXPR:
7617 gcc_unreachable (); /* Not used for C. */
7619 case FIX_TRUNC_EXPR:
7620 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7621 if (target == 0 || modifier == EXPAND_STACK_PARM)
7622 target = gen_reg_rtx (mode);
7623 expand_fix (target, op0, unsignedp);
7627 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7628 if (target == 0 || modifier == EXPAND_STACK_PARM)
7629 target = gen_reg_rtx (mode);
7630 /* expand_float can't figure out what to do if FROM has VOIDmode.
7631 So give it the correct mode. With -O, cse will optimize this. */
7632 if (GET_MODE (op0) == VOIDmode)
7633 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7635 expand_float (target, op0,
7636 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7640 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7641 if (modifier == EXPAND_STACK_PARM)
7642 target = 0;
7643 temp = expand_unop (mode,
7644 optab_for_tree_code (NEGATE_EXPR, type),
7647 return REDUCE_BIT_FIELD (temp);
7650 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7651 if (modifier == EXPAND_STACK_PARM)
7652 target = 0;
7654 /* ABS_EXPR is not valid for complex arguments. */
7655 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7656 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7658 /* Unsigned abs is simply the operand. Testing here means we don't
7659 risk generating incorrect code below. */
7660 if (TYPE_UNSIGNED (type))
7663 return expand_abs (mode, op0, target, unsignedp,
7664 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7668 target = original_target;
7669 if (target == 0
7670 || modifier == EXPAND_STACK_PARM
7671 || (MEM_P (target) && MEM_VOLATILE_P (target))
7672 || GET_MODE (target) != mode
7673 || (REG_P (target)
7674 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7675 target = gen_reg_rtx (mode);
7676 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7677 target, &op0, &op1, 0);
7679 /* First try to do it with a special MIN or MAX instruction.
7680 If that does not win, use a conditional jump to select the proper
7682 this_optab = optab_for_tree_code (code, type);
7683 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7688 /* At this point, a MEM target is no longer useful; we will get better
7689 code without it. */
7691 if (! REG_P (target))
7692 target = gen_reg_rtx (mode);
7694 /* If op1 was placed in target, swap op0 and op1. */
7695 if (target != op0 && target == op1)
7702 /* We generate better code and avoid problems with op1 mentioning
7703 target by forcing op1 into a pseudo if it isn't a constant. */
7704 if (! CONSTANT_P (op1))
7705 op1 = force_reg (mode, op1);
7708 emit_move_insn (target, op0);
7710 op0 = gen_label_rtx ();
7712 /* If this mode is an integer too wide to compare properly,
7713 compare word by word. Rely on cse to optimize constant cases. */
7714 if (GET_MODE_CLASS (mode) == MODE_INT
7715 && ! can_compare_p (GE, mode, ccp_jump))
7717 if (code == MAX_EXPR)
7718 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7721 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7726 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7727 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7729 emit_move_insn (target, op1);
7734 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7735 if (modifier == EXPAND_STACK_PARM)
7736 target = 0;
7737 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7741 /* ??? Can optimize bitwise operations with one arg constant.
7742 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7743 and (a bitwise1 b) bitwise2 b (etc)
7744 but that is probably not worthwhile. */
7746 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7747 boolean values when we want in all cases to compute both of them. In
7748 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7749 as actual zero-or-1 values and then bitwise anding. In cases where
7750 there cannot be any side effects, better code would be made by
7751 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7752 how to recognize those cases. */
7754 case TRUTH_AND_EXPR:
7755 code = BIT_AND_EXPR;
7760 code = BIT_IOR_EXPR;
7764 case TRUTH_XOR_EXPR:
7765 code = BIT_XOR_EXPR;
7773 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7774 subtarget = 0;
7775 if (modifier == EXPAND_STACK_PARM)
7776 target = 0;
7777 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7778 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7781 /* Could determine the answer when only additive constants differ. Also,
7782 the addition of one can be handled by changing the condition. */
7789 case UNORDERED_EXPR:
7797 temp = do_store_flag (exp,
7798 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7799 tmode != VOIDmode ? tmode : mode, 0);
7803 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7804 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7806 && REG_P (original_target)
7807 && (GET_MODE (original_target)
7808 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7810 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7813 /* If temp is constant, we can just compute the result. */
7814 if (GET_CODE (temp) == CONST_INT)
7816 if (INTVAL (temp) != 0)
7817 emit_move_insn (target, const1_rtx);
7819 emit_move_insn (target, const0_rtx);
7824 if (temp != original_target)
7826 enum machine_mode mode1 = GET_MODE (temp);
7827 if (mode1 == VOIDmode)
7828 mode1 = tmode != VOIDmode ? tmode : mode;
7830 temp = copy_to_mode_reg (mode1, temp);
7833 op1 = gen_label_rtx ();
7834 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7835 GET_MODE (temp), unsignedp, op1);
7836 emit_move_insn (temp, const1_rtx);
7841 /* If no set-flag instruction, must generate a conditional store
7842 into a temporary variable. Drop through and handle this
7843 like && and ||. */
7847 || modifier == EXPAND_STACK_PARM
7848 || ! safe_from_p (target, exp, 1)
7849 /* Make sure we don't have a hard reg (such as function's return
7850 value) live across basic blocks, if not optimizing. */
7851 || (!optimize && REG_P (target)
7852 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7853 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7856 emit_move_insn (target, const0_rtx);
7858 op1 = gen_label_rtx ();
7859 jumpifnot (exp, op1);
7862 emit_move_insn (target, const1_rtx);
7865 return ignore ? const0_rtx : target;
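/* Illustrative sketch, not part of this file (kept out of compilation):
   the branchy fallback just above in C terms: clear the target, then
   jump around a store of 1 when the condition fails. */
#if 0
static int
example_store_flag_fallback (int x, int y)
{
  int t = 0;	/* emit_move_insn (target, const0_rtx) */
  if (x < y)	/* jumpifnot (exp, op1) */
    t = 1;	/* emit_move_insn (target, const1_rtx) */
  return t;
}
#endif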
7867 case TRUTH_NOT_EXPR:
7868 if (modifier == EXPAND_STACK_PARM)
7869 target = 0;
7870 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7871 /* The parser is careful to generate TRUTH_NOT_EXPR
7872 only with operands that are always zero or one. */
7873 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7874 target, 1, OPTAB_LIB_WIDEN);
7878 case STATEMENT_LIST:
7880 tree_stmt_iterator iter;
7882 gcc_assert (ignore);
7884 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7885 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7890 /* A COND_EXPR with its type being VOID_TYPE represents a
7891 conditional jump and is handled in
7892 expand_gimple_cond_expr. */
7893 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
7895 /* Note that COND_EXPRs whose type is a structure or union
7896 are required to be constructed to contain assignments of
7897 a temporary variable, so that we can evaluate them here
7898 for side effect only. If type is void, we must do likewise. */
7900 gcc_assert (!TREE_ADDRESSABLE (type)
7902 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7903 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7905 /* If we are not to produce a result, we have no target. Otherwise,
7906 if a target was specified use it; it will not be used as an
7907 intermediate target unless it is safe. If no target, use a
7908 temporary. */
7910 if (modifier != EXPAND_STACK_PARM
7912 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7913 && GET_MODE (original_target) == mode
7914 #ifdef HAVE_conditional_move
7915 && (! can_conditionally_move_p (mode)
7916 || REG_P (original_target))
7918 && !MEM_P (original_target))
7919 temp = original_target;
7921 temp = assign_temp (type, 0, 0, 1);
7923 do_pending_stack_adjust ();
7925 op0 = gen_label_rtx ();
7926 op1 = gen_label_rtx ();
7927 jumpifnot (TREE_OPERAND (exp, 0), op0);
7928 store_expr (TREE_OPERAND (exp, 1), temp,
7929 modifier == EXPAND_STACK_PARM);
7931 emit_jump_insn (gen_jump (op1));
7934 store_expr (TREE_OPERAND (exp, 2), temp,
7935 modifier == EXPAND_STACK_PARM);
7942 target = expand_vec_cond_expr (exp, target);
7947 tree lhs = TREE_OPERAND (exp, 0);
7948 tree rhs = TREE_OPERAND (exp, 1);
7950 gcc_assert (ignore);
7952 /* Check for |= or &= of a bitfield of size one into another bitfield
7953 of size 1. In this case, (unless we need the result of the
7954 assignment) we can do this more efficiently with a
7955 test followed by an assignment, if necessary.
7957 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7958 things change so we do, this code should be enhanced to
7959 handle it. */
7960 if (TREE_CODE (lhs) == COMPONENT_REF
7961 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7962 || TREE_CODE (rhs) == BIT_AND_EXPR)
7963 && TREE_OPERAND (rhs, 0) == lhs
7964 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7965 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7966 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7968 rtx label = gen_label_rtx ();
7970 do_jump (TREE_OPERAND (rhs, 1),
7971 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7972 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7973 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7974 (TREE_CODE (rhs) == BIT_IOR_EXPR
7976 : integer_zero_node)));
7977 do_pending_stack_adjust ();
7982 expand_assignment (lhs, rhs);
7988 if (!TREE_OPERAND (exp, 0))
7989 expand_null_return ();
7990 else
7991 expand_return (TREE_OPERAND (exp, 0));
7995 return expand_expr_addr_expr (exp, target, tmode, modifier);
7998 /* Get the rtx code of the operands. */
7999 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8000 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8003 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8005 /* Move the real (op0) and imaginary (op1) parts to their location. */
8006 write_complex_part (target, op0, false);
8007 write_complex_part (target, op1, true);
8012 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8013 return read_complex_part (op0, false);
8016 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8017 return read_complex_part (op0, true);
8020 expand_resx_expr (exp);
8023 case TRY_CATCH_EXPR:
8025 case EH_FILTER_EXPR:
8026 case TRY_FINALLY_EXPR:
8027 /* Lowered by tree-eh.c. */
8030 case WITH_CLEANUP_EXPR:
8031 case CLEANUP_POINT_EXPR:
8033 case CASE_LABEL_EXPR:
8039 case PREINCREMENT_EXPR:
8040 case PREDECREMENT_EXPR:
8041 case POSTINCREMENT_EXPR:
8042 case POSTDECREMENT_EXPR:
8045 case TRUTH_ANDIF_EXPR:
8046 case TRUTH_ORIF_EXPR:
8047 /* Lowered by gimplify.c. */
8051 return get_exception_pointer (cfun);
8054 return get_exception_filter (cfun);
8057 /* Function descriptors are not valid except as
8058 initialization constants, and should not be expanded. */
8066 expand_label (TREE_OPERAND (exp, 0));
8070 expand_asm_expr (exp);
8073 case WITH_SIZE_EXPR:
8074 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8075 have pulled out the size to use in whatever context it needed. */
8076 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8077 modifier, alt_rtl);
8079 case REALIGN_LOAD_EXPR:
8081 tree oprnd0 = TREE_OPERAND (exp, 0);
8082 tree oprnd1 = TREE_OPERAND (exp, 1);
8083 tree oprnd2 = TREE_OPERAND (exp, 2);
8086 this_optab = optab_for_tree_code (code, type);
8087 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8088 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8089 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8098 return lang_hooks.expand_expr (exp, original_target, tmode,
8099 modifier, alt_rtl);
8102 /* Here to do an ordinary binary operator. */
8104 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8105 subtarget, &op0, &op1, 0);
8107 this_optab = optab_for_tree_code (code, type);
8109 if (modifier == EXPAND_STACK_PARM)
8111 temp = expand_binop (mode, this_optab, op0, op1, target,
8112 unsignedp, OPTAB_LIB_WIDEN);
8114 return REDUCE_BIT_FIELD (temp);
8116 #undef REDUCE_BIT_FIELD
8118 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8119 signedness of TYPE), possibly returning the result in TARGET. */
8121 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8123 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8124 if (target && GET_MODE (target) != GET_MODE (exp))
8125 target = 0;
8126 if (TYPE_UNSIGNED (type))
8129 if (prec < HOST_BITS_PER_WIDE_INT)
8130 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8133 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8134 ((unsigned HOST_WIDE_INT) 1
8135 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8137 return expand_and (GET_MODE (exp), exp, mask, target);
8141 tree count = build_int_cst (NULL_TREE,
8142 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8143 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8144 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
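/* Illustrative sketch, not part of this file (kept out of compilation):
   the same reduction on host integers, assuming two's complement,
   arithmetic right shift, and 0 < PREC < the host word width. Unsigned
   fields are masked; signed fields use a shift pair so the field's
   sign bit is replicated. */
#if 0
static long
example_reduce_precision (long val, int prec, int is_unsigned)
{
  int shift = (int) sizeof (long) * 8 - prec;

  if (is_unsigned)
    return val & (((long) 1 << prec) - 1);
  return ((long) ((unsigned long) val << shift)) >> shift;
}
#endif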
8148 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8149 when applied to the address of EXP produces an address known to be
8150 aligned more than BIGGEST_ALIGNMENT. */
8153 is_aligning_offset (tree offset, tree exp)
8155 /* Strip off any conversions. */
8156 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8157 || TREE_CODE (offset) == NOP_EXPR
8158 || TREE_CODE (offset) == CONVERT_EXPR)
8159 offset = TREE_OPERAND (offset, 0);
8161 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8162 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8163 if (TREE_CODE (offset) != BIT_AND_EXPR
8164 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8165 || compare_tree_int (TREE_OPERAND (offset, 1),
8166 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8167 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8168 return 0;
8170 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8171 It must be NEGATE_EXPR. Then strip any more conversions. */
8172 offset = TREE_OPERAND (offset, 0);
8173 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8174 || TREE_CODE (offset) == NOP_EXPR
8175 || TREE_CODE (offset) == CONVERT_EXPR)
8176 offset = TREE_OPERAND (offset, 0);
8178 if (TREE_CODE (offset) != NEGATE_EXPR)
8181 offset = TREE_OPERAND (offset, 0);
8182 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8183 || TREE_CODE (offset) == NOP_EXPR
8184 || TREE_CODE (offset) == CONVERT_EXPR)
8185 offset = TREE_OPERAND (offset, 0);
8187 /* This must now be the address of EXP. */
8188 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
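
/* Illustrative sketch (an added example, not original source): the tree
   shape recognized above arises from manual alignment code such as

       p = (char *) &exp + ((- (long) &exp) & (ALIGN - 1));

   where ALIGN is a power of 2 whose byte mask exceeds BIGGEST_ALIGNMENT
   in bytes; adding that offset rounds the address of EXP up to the next
   multiple of ALIGN, so the result is known to be at least
   ALIGN-aligned.  */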
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
        {
          *ptr_offset = size_zero_node;
          return TREE_OPERAND (arg, 0);
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
        {
          array = TREE_OPERAND (arg, 0);
          offset = size_zero_node;
        }
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
        {
          array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
          if (TREE_CODE (array) != STRING_CST
              && TREE_CODE (array) != VAR_DECL)
            return 0;
        }
      else
        return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
              || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg0, 0);
          offset = arg1;
        }
      else if (TREE_CODE (arg1) == ADDR_EXPR
               && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
                   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
        {
          array = TREE_OPERAND (arg1, 0);
          offset = arg0;
        }
      else
        return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
          || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
        return 0;

      /* Only handle the variable if it is read-only, non-volatile and
         binds locally.  */
      if (! TREE_READONLY (array)
          || TREE_SIDE_EFFECTS (array)
          || ! targetm.binds_local_p (array))
        return 0;

      /* Avoid cases like const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
          || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
          || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
          || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
        return 0;

      /* If the variable is bigger than the string literal, OFFSET must
         be constant and within the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
          && (! host_integerp (offset, 1)
              || compare_tree_int (offset, length) >= 0))
        return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
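
/* Usage sketch (hypothetical code): for the argument tree built from
   "hello" + 2, i.e. a PLUS_EXPR of an ADDR_EXPR of a STRING_CST and the
   constant 2, this returns the STRING_CST and sets *PTR_OFFSET to a
   sizetype 2.  Given instead

       static const char greet[6] = "hello";

   and the argument greet + 2, the VAR_DECL path returns
   DECL_INITIAL (greet), the same STRING_CST, provided the variable is
   read-only, binds locally and is fully initialized.  */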
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
                                                arg0, arg1, type),
                          target, VOIDmode, EXPAND_NORMAL);
    }
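
  /* For instance (illustrative): "(x & 8) != 0" becomes the equivalent
     of (x >> 3) & 1, and "(x & 8) == 0" additionally XORs that result
     with 1; either form needs only shift/and/xor instructions.  */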
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
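
/* Sketch of the fallback emitted above (illustrative): for
   target = (a < b) with no usable scc instruction, the sequence
   behaves like

       target = 1;
       if (a < b) goto L;
       target = 0;
     L:

   with the two constants swapped when INVERT is set.  */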
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
            rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
                           index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
                               omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
        {
          index_expr = convert (lang_hooks.types.type_for_size
                                (index_bits, 0), index_expr);
          index_type = TREE_TYPE (index_expr);
        }

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
                       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
                       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
                              table_label, default_label));
  return 1;
}
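
/* Usage sketch (illustrative; relies on the caller's behavior): for a
   switch over the values 3 .. 10, the caller passes MINVAL 3 and RANGE
   7, and the casesi pattern itself performs the bounds check and the
   dispatch through TABLE_LABEL, branching to DEFAULT_LABEL when the
   index is out of range; that is why no explicit comparison is emitted
   here except in the wider-than-SImode case above.  */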
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
              rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
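
  /* Concretely (illustrative numbers): for a switch over 5 .. 12, the
     lower bound 5 has already been subtracted, so RANGE is 7 and the
     single unsigned test

         if ((unsigned) (x - 5) > 7U) goto default_label;

     catches both x < 5 (the subtraction wraps to a large unsigned
     value) and x > 12 with one comparison instead of two.  */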
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
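
/* Address-computation sketch (illustrative, not from the original
   source): with a 4-byte CASE_VECTOR_MODE the address formed above is

       table_label + index * 4

   and the entry loaded from there is the dispatch target (or, for a
   PC-relative table, an offset that the target's tablejump pattern
   turns back into an address).  */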
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
               rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
                             convert (index_type, index_expr),
                             convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
                convert_modes (TYPE_MODE (index_type),
                               TYPE_MODE (TREE_TYPE (range)),
                               expand_expr (range, NULL_RTX,
                                            VOIDmode, 0),
                               TYPE_UNSIGNED (TREE_TYPE (range))),
                table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* Ideally we would also return 1 if, say, V4DI were requested and we
     had no DI but did have V2DI; such a case is probably very
     unlikely, though.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
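
/* Example (illustrative): a target with 64-bit scalar support but no
   vector unit still gets vector_mode_valid_p (V2DImode) == 1, since a
   V2DI operation can later be emulated with a pair of DImode
   operations.  */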
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
        RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
                                                         inner);
      else
        RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
                                               TREE_INT_CST_HIGH (elt),
                                               inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
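
/* Usage sketch (hypothetical values): for a V4SImode VECTOR_CST whose
   element list is 1, 2, 3, the first loop fills elements 0 .. 2 and
   the second zero-fills element 3, yielding
   (const_vector:V4SI [1 2 3 0]).  */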
#include "gt-expr.h"