/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
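
/* Illustration (assuming a conventional x86-like target): the stack
   grows downward while argument offsets grow upward, so exactly one
   of the two macros tested above is defined and PUSH_ARGS_REVERSED is
   in effect.  A call such as f (a, b, c) then pushes c first and a
   last, leaving a at the lowest address.  */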
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
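
/* Illustration (assuming a downward-growing stack): a push then
   expands to RTL of the form

       (set (mem:M (pre_dec:P (reg SP))) (reg X))

   i.e. the C idiom *--sp = x; with PRE_INC the idiom is *++sp = x
   instead.  */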
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
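
/* Illustration (with made-up parameters): on a 32-bit target where
   MOVE_RATIO is, say, 5, a 16-byte word-aligned copy is estimated by
   move_by_pieces_ninsns at four SImode moves; 4 < 5, so the copy is
   expanded inline instead of calling memcpy.  */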
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

init_expr_once (void)
  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
               && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
/* This is run at the start of compiling a function.  */

  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
convert_move (rtx to, rtx from, int unsignedp)
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
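
  /* Illustration: with unsignedp == 1 the equivalent RTX for widening
     X to mode M is (zero_extend:M X); with unsignedp == 0 it is
     (sign_extend:M X); a negative unsignedp means the signedness is
     not known, so no equivalent code can be recorded.  */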
  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);

  gcc_assert (GET_MODE_PRECISION (from_mode)
              != GET_MODE_PRECISION (to_mode));
  if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
      insns = get_insns ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);

      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
          if (reg_overlap_mentioned_p (to, from))
            from = force_reg (from_mode, from);
          emit_insn (gen_rtx_CLOBBER (VOIDmode, to));

          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      fill_value = const0_rtx;
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
              fill_value = convert_to_mode (word_mode, fill_value, 1);
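
              /* Illustration: for a signed 32-bit LOWFROM, the
                 arithmetic shift right by 31 yields 0xffffffff when
                 LOWFROM is negative and 0 otherwise, which is exactly
                 the value every higher-order word of TO must get.  */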
      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
            && ! MEM_VOLATILE_P (from)
            && direct_load[(int) to_mode]
            && ! mode_dependent_address_p (XEXP (from, 0)))
           || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
            && ! MEM_VOLATILE_P (from)
            && direct_load[(int) to_mode]
            && ! mode_dependent_address_p (XEXP (from, 0)))
           || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
          emit_move_insn (to, tmp);
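
          /* Illustration: extending QImode to SImode this way on a
             32-bit target uses SHIFT_AMOUNT == 24.  For the signed
             byte 0x80, (0x80 << 24) >> 24 with an arithmetic right
             shift yields 0xffffff80, the correct sign extension; a
             logical right shift yields 0x00000080, the zero
             extension.  */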
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
  return convert_modes (mode, VOIDmode, x, unsignedp);

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
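
  /* Illustration (assuming a 32-bit HOST_WIDE_INT): converting
     (const_int -1) taken as unsigned SImode into a 64-bit mode must
     yield 0x00000000ffffffff.  gen_lowpart would sign-replicate and
     produce all ones in the high word; immed_double_const with an
     explicit zero high word does not.  */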
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ??? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);

      return gen_lowpart (mode, x);
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
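
/* Illustration (assuming a 64-bit HOST_WIDE_INT and MOVE_MAX_PIECES
   of 16): immediate constants are built from at most two
   HOST_WIDE_INTs, so immediates wider than 16 bytes cannot be
   represented and STORE_MAX_PIECES is MIN (16, 16) == 16.  */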
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
  return MOVE_BY_PIECES_P (len, align);

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.from_addr = from_addr;
      to_addr = XEXP (to, 0);
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
#ifdef STACK_GROWS_DOWNWARD
  data.to_addr = to_addr;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

      gcc_assert (!data.reverse);

      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
      else
        data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
      to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
        to1 = adjust_address (data.to, QImode, data.offset);
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));

  while (max_size > 1)
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
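
      /* Worked example: l == 7 with modes of size 4, 2 and 1 usable
         gives 7/4 = 1 insn (3 bytes left), 3/2 = 1 insn (1 byte
         left), 1/1 = 1 insn: three insns in total.  */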
      max_size = GET_MODE_SIZE (mode);

  return n_insns;

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        to1 = adjust_automodify_address (data->to, mode, data->to_addr,
      else
        to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

        emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
        emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
  switch (method)
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
  /* If arguments are pushed on the stack, then they're safe.  */

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
    CUMULATIVE_ARGS args_so_far;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
          rtx last = get_last_insn ();

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;

          delete_insns_since (last);

  volatile_ok = save_volatile_ok;
  return false;
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */
static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

    set_user_assembler_name (block_move_fn, asmspec);

static tree
emit_block_move_libcall_fn (int for_call)
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);

  return block_move_fn;
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
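
/* Illustration: the RTL emitted above corresponds to the C loop

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];     (one QImode byte per iteration)
       iter++;
     cmp:
       if (iter < size) goto top;

   so a zero SIZE executes the body zero times.  */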
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
      delete_insns_since (last);

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_from_reg (int regno, rtx x, int nregs)
  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
      delete_insns_since (last);

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

gen_group_rtx (rtx orig)
  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  for (; i < length; i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
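
          /* Worked example: for SSIZE == 6 and an SImode piece at
             BYTEPOS == 4, only 2 bytes of the struct remain, so
             SHIFT == (4 - 2) * 8 == 16 bits and BYTELEN becomes 2.  */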
      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */

      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
      else if (GET_CODE (src) == CONCAT)
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
          int slen = GET_MODE_SIZE (GET_MODE (src));

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

emit_group_load (rtx dst, rtx src, tree type, int ssize)
  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      emit_move_insn (d, tmps[i]);

/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
      e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

emit_group_move (rtx dst, rtx src)
  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));

/* Move a group of registers represented by a PARALLEL into pseudos.  */

emit_group_move_into_temps (rtx src)
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));

  for (i = 0; i < XVECLEN (src, 0); i++)
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */

  if (GET_CODE (dst) == PARALLEL)
      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         memory.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
          bytelen = ssize - bytepos;
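
          /* Worked example (mirroring the load side): for SSIZE == 6,
             BYTEPOS == 4 and an SImode TMPS[i], the 2 valid bytes are
             shifted right by 16 bits so that they sit at the lsb
             before being stored.  */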
      if (GET_CODE (dst) == CONCAT)
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
          else
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
      preserve_temp_slots (tgtblk);

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? ! BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
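
  /* Worked example: for a 5-byte structure on a target with 32-bit
     words, bytes % UNITS_PER_WORD == 1, so when the data is
     left-padded PADDING_CORRECTION == 32 - 1*8 == 24 and the copy
     below starts extracting 24 bits into the source register.  */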
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

use_reg (rtx *call_fusage, rtx reg)
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

use_regs (rtx *call_fusage, int regno, int nregs)
  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

use_group_regs (rtx *call_fusage, rtx regs)
  for (i = 0; i < XVECLEN (regs, 0); i++)
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
2081 /* Determine whether the LEN bytes generated by CONSTFUN can be
2082 stored to memory using several move instructions. CONSTFUNDATA is
2083 a pointer which will be passed as argument in every CONSTFUN call.
2084 ALIGN is the maximum alignment we can assume. Return nonzero if a
2085 call to store_by_pieces should succeed. */
2088 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2089 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2090 void *constfundata, unsigned int align)
2092 unsigned HOST_WIDE_INT l;
2093 unsigned int max_size;
2094 HOST_WIDE_INT offset = 0;
2095 enum machine_mode mode, tmode;
2096 enum insn_code icode;
2103 if (! STORE_BY_PIECES_P (len, align))
2106 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2107 if (align >= GET_MODE_ALIGNMENT (tmode))
2108 align = GET_MODE_ALIGNMENT (tmode);
2111 enum machine_mode xmode;
2113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2115 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2116 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2117 || SLOW_UNALIGNED_ACCESS (tmode, align))
2120 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2123 /* We would first store what we can in the largest integer mode, then go to
2124 successively smaller modes. */
2127 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2132 max_size = STORE_MAX_PIECES + 1;
2133 while (max_size > 1)
2135 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2136 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2137 if (GET_MODE_SIZE (tmode) < max_size)
2140 if (mode == VOIDmode)
2143 icode = mov_optab->handlers[(int) mode].insn_code;
2144 if (icode != CODE_FOR_nothing
2145 && align >= GET_MODE_ALIGNMENT (mode))
2147 unsigned int size = GET_MODE_SIZE (mode);
2154 cst = (*constfun) (constfundata, offset, mode);
2155 if (!LEGITIMATE_CONSTANT_P (cst))
2165 max_size = GET_MODE_SIZE (mode);
2168 /* The code above should have handled everything. */
2175 /* Generate several move instructions to store LEN bytes generated by
2176 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2177 pointer which will be passed as argument in every CONSTFUN call.
2178 ALIGN is the maximum alignment we can assume.
2179 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2180 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2184 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2185 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2186 void *constfundata, unsigned int align, int endp)
2188 struct store_by_pieces data;
2192 gcc_assert (endp != 2);
2196 gcc_assert (STORE_BY_PIECES_P (len, align));
2197 data.constfun = constfun;
2198 data.constfundata = constfundata;
2201 store_by_pieces_1 (&data, align);
2206 gcc_assert (!data.reverse);
2211 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2212 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2214 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2217 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2224 to1 = adjust_address (data.to, QImode, data.offset);
2232 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2233 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2236 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2238 struct store_by_pieces data;
2243 data.constfun = clear_by_pieces_1;
2244 data.constfundata = NULL;
2247 store_by_pieces_1 (&data, align);
2250 /* Callback routine for clear_by_pieces.
2251 Return const0_rtx unconditionally. */
2254 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2255 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2256 enum machine_mode mode ATTRIBUTE_UNUSED)
2261 /* Subroutine of clear_by_pieces and store_by_pieces.
2262 Generate several move instructions to store LEN bytes of block TO. (A MEM
2263 rtx with BLKmode). ALIGN is the maximum alignment we can assume. */
2266 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2267 unsigned int align ATTRIBUTE_UNUSED)
2269 rtx to_addr = XEXP (data->to, 0);
2270 unsigned int max_size = STORE_MAX_PIECES + 1;
2271 enum machine_mode mode = VOIDmode, tmode;
2272 enum insn_code icode;
2275 data->to_addr = to_addr;
2277 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2278 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2280 data->explicit_inc_to = 0;
2282 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2284 data->offset = data->len;
2286 /* If storing requires more than two move insns,
2287 copy addresses to registers (to make displacements shorter)
2288 and use post-increment if available. */
2289 if (!data->autinc_to
2290 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2292 /* Determine the main mode we'll be using. */
2293 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2294 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2295 if (GET_MODE_SIZE (tmode) < max_size)
2298 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2300 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2301 data->autinc_to = 1;
2302 data->explicit_inc_to = -1;
2305 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2306 && ! data->autinc_to)
2308 data->to_addr = copy_addr_to_reg (to_addr);
2309 data->autinc_to = 1;
2310 data->explicit_inc_to = 1;
2313 if ( !data->autinc_to && CONSTANT_P (to_addr))
2314 data->to_addr = copy_addr_to_reg (to_addr);
2317 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2318 if (align >= GET_MODE_ALIGNMENT (tmode))
2319 align = GET_MODE_ALIGNMENT (tmode);
2322 enum machine_mode xmode;
2324 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2326 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2327 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2328 || SLOW_UNALIGNED_ACCESS (tmode, align))
2331 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2334 /* First store what we can in the largest integer mode, then go to
2335 successively smaller modes. */
2337 while (max_size > 1)
2339 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2340 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2341 if (GET_MODE_SIZE (tmode) < max_size)
2344 if (mode == VOIDmode)
2347 icode = mov_optab->handlers[(int) mode].insn_code;
2348 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2349 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2351 max_size = GET_MODE_SIZE (mode);
2354 /* The code above should have handled everything. */
2355 gcc_assert (!data->len);
2358 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2359 with move instructions for mode MODE. GENFUN is the gen_... function
2360 to make a move insn for that mode. DATA has all the other info. */
2363 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2364 struct store_by_pieces *data)
2366 unsigned int size = GET_MODE_SIZE (mode);
2369 while (data->len >= size)
2372 data->offset -= size;
2374 if (data->autinc_to)
2375 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2378 to1 = adjust_address (data->to, mode, data->offset);
2380 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2381 emit_insn (gen_add2_insn (data->to_addr,
2382 GEN_INT (-(HOST_WIDE_INT) size)));
2384 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2385 emit_insn ((*genfun) (to1, cst));
2387 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2388 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2390 if (! data->reverse)
2391 data->offset += size;
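/* As an illustration: storing 7 well-aligned bytes on a hypothetical
   32-bit target runs this routine three times from store_by_pieces_1,
   emitting one SImode store (4 bytes), one HImode store (2 bytes) and
   one QImode store (1 byte), assuming all three move patterns exist. */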
2397 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2398 its length in bytes. */
2401 clear_storage (rtx object, rtx size)
2404 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2405 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2407 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2408 just move a zero. Otherwise, do this a piece at a time. */
2409 if (GET_MODE (object) != BLKmode
2410 && GET_CODE (size) == CONST_INT
2411 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2412 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2415 if (size == const0_rtx)
2417 else if (GET_CODE (size) == CONST_INT
2418 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2419 clear_by_pieces (object, INTVAL (size), align);
2420 else if (clear_storage_via_clrmem (object, size, align))
2423 retval = clear_storage_via_libcall (object, size);
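/* So, for example, a small constant-length clear is normally expanded
   inline by clear_by_pieces, while a variable-length clear on a target
   without a clrmem pattern falls through to the memset libcall. */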
2429 /* A subroutine of clear_storage. Expand a clrmem pattern;
2430 return true if successful. */
2433 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2435 /* Try the most limited insn first, because there's no point
2436 including more than one in the machine description unless
2437 the more limited one has some advantage. */
2439 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2440 enum machine_mode mode;
2442 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2443 mode = GET_MODE_WIDER_MODE (mode))
2445 enum insn_code code = clrmem_optab[(int) mode];
2446 insn_operand_predicate_fn pred;
2448 if (code != CODE_FOR_nothing
2449 /* We don't need MODE to be narrower than
2450 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2451 the mode mask, as it is returned by the macro, it will
2452 definitely be less than the actual mode mask. */
2453 && ((GET_CODE (size) == CONST_INT
2454 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2455 <= (GET_MODE_MASK (mode) >> 1)))
2456 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2457 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2458 || (*pred) (object, BLKmode))
2459 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2460 || (*pred) (opalign, VOIDmode)))
2463 rtx last = get_last_insn ();
2466 op1 = convert_to_mode (mode, size, 1);
2467 pred = insn_data[(int) code].operand[1].predicate;
2468 if (pred != 0 && ! (*pred) (op1, mode))
2469 op1 = copy_to_mode_reg (mode, op1);
2471 pat = GEN_FCN ((int) code) (object, op1, opalign);
2478 delete_insns_since (last);
2485 /* A subroutine of clear_storage. Expand a call to memset.
2486 Return the return value of memset, 0 otherwise. */
2489 clear_storage_via_libcall (rtx object, rtx size)
2491 tree call_expr, arg_list, fn, object_tree, size_tree;
2492 enum machine_mode size_mode;
2495 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2496 place those pseudos into a VAR_DECL and use them later. */
2498 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2500 size_mode = TYPE_MODE (sizetype);
2501 size = convert_to_mode (size_mode, size, 1);
2502 size = copy_to_mode_reg (size_mode, size);
2504 /* It is incorrect to use the libcall calling conventions to call
2505 memset in this context. This could be a user call to memset and
2506 the user may wish to examine the return value from memset. For
2507 targets where libcalls and normal calls have different conventions
2508 for returning pointers, we could end up generating incorrect code. */
2510 object_tree = make_tree (ptr_type_node, object);
2511 size_tree = make_tree (sizetype, size);
2513 fn = clear_storage_libcall_fn (true);
2514 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2515 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2516 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2518 /* Now we have to build up the CALL_EXPR itself. */
2519 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2520 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2521 call_expr, arg_list, NULL_TREE);
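/* The call built above is equivalent to the C source
   memset (object, 0, size), expanded with the normal calling
   conventions so that its return value remains usable. */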
2523 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2528 /* A subroutine of clear_storage_via_libcall. Create the tree node
2529 for the function we use for block clears. The first time FOR_CALL
2530 is true, we call assemble_external. */
2532 static GTY(()) tree block_clear_fn;
2535 init_block_clear_fn (const char *asmspec)
2537 if (!block_clear_fn)
2541 fn = get_identifier ("memset");
2542 args = build_function_type_list (ptr_type_node, ptr_type_node,
2543 integer_type_node, sizetype,
2546 fn = build_decl (FUNCTION_DECL, fn, args);
2547 DECL_EXTERNAL (fn) = 1;
2548 TREE_PUBLIC (fn) = 1;
2549 DECL_ARTIFICIAL (fn) = 1;
2550 TREE_NOTHROW (fn) = 1;
2552 block_clear_fn = fn;
2556 set_user_assembler_name (block_clear_fn, asmspec);
2560 clear_storage_libcall_fn (int for_call)
2562 static bool emitted_extern;
2564 if (!block_clear_fn)
2565 init_block_clear_fn (NULL);
2567 if (for_call && !emitted_extern)
2569 emitted_extern = true;
2570 make_decl_rtl (block_clear_fn);
2571 assemble_external (block_clear_fn);
2574 return block_clear_fn;
2577 /* Write to one of the components of the complex value CPLX. Write VAL to
2578 the real part if IMAG_P is false, and the imaginary part if it's true. */
2581 write_complex_part (rtx cplx, rtx val, bool imag_p)
2583 if (GET_CODE (cplx) == CONCAT)
2584 emit_move_insn (XEXP (cplx, imag_p), val);
2587 enum machine_mode cmode = GET_MODE (cplx);
2588 enum machine_mode imode = GET_MODE_INNER (cmode);
2589 unsigned ibitsize = GET_MODE_BITSIZE (imode);
2591 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
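/* For example, for a complex double on a target whose DFmode is
   64 bits wide, ibitsize is 64, so the real part occupies bits 0..63
   and the imaginary part bits 64..127 of the representation. */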
2595 /* Extract one of the components of the complex value CPLX. Extract the
2596 real part if IMAG_P is false, and the imaginary part if it's true. */
2599 read_complex_part (rtx cplx, bool imag_p)
2601 enum machine_mode cmode, imode;
2604 if (GET_CODE (cplx) == CONCAT)
2605 return XEXP (cplx, imag_p);
2607 cmode = GET_MODE (cplx);
2608 imode = GET_MODE_INNER (cmode);
2609 ibitsize = GET_MODE_BITSIZE (imode);
2611 /* Special case reads from complex constants that got spilled to memory. */
2612 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2614 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2615 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2617 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2618 if (CONSTANT_CLASS_P (part))
2619 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2623 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2624 true, NULL_RTX, imode, imode);
2627 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2628 ALT_MODE instead of the operand's natural mode, MODE. CODE is the insn
2629 code for the move in ALT_MODE, and is known to be valid. Returns the
2630 instruction emitted. */
2633 emit_move_via_alt_mode (enum machine_mode alt_mode, enum machine_mode mode,
2634 enum insn_code code, rtx x, rtx y)
2636 /* Get X and Y in ALT_MODE. We can't use gen_lowpart here because it
2637 may call change_address which is not appropriate if we were
2638 called when a reload was in progress. We don't have to worry
2639 about changing the address since the size in bytes is supposed to
2640 be the same. Copy the MEM to change the mode and move any
2641 substitutions from the old MEM to the new one. */
2643 if (reload_in_progress)
2647 x = gen_lowpart_common (alt_mode, x1);
2648 if (x == 0 && MEM_P (x1))
2650 x = adjust_address_nv (x1, alt_mode, 0);
2651 copy_replacements (x1, x);
2654 y = gen_lowpart_common (alt_mode, y1);
2655 if (y == 0 && MEM_P (y1))
2657 y = adjust_address_nv (y1, alt_mode, 0);
2658 copy_replacements (y1, y);
2663 x = simplify_gen_subreg (alt_mode, x, mode, 0);
2664 y = simplify_gen_subreg (alt_mode, y, mode, 0);
2667 return emit_insn (GEN_FCN (code) (x, y));
2670 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2671 an integer mode of the same size as MODE. Returns the instruction
2672 emitted, or NULL if such a move could not be generated. */
2675 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y)
2677 enum machine_mode imode;
2678 enum insn_code code;
2680 /* There must exist a mode of the exact size we require. */
2681 imode = int_mode_for_mode (mode);
2682 if (imode == BLKmode)
2685 /* The target must support moves in this mode. */
2686 code = mov_optab->handlers[imode].insn_code;
2687 if (code == CODE_FOR_nothing)
2690 return emit_move_via_alt_mode (imode, mode, code, x, y);
2693 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
2694 Return an equivalent MEM that does not use an auto-increment. */
2697 emit_move_resolve_push (enum machine_mode mode, rtx x)
2699 enum rtx_code code = GET_CODE (XEXP (x, 0));
2700 HOST_WIDE_INT adjust;
2703 adjust = GET_MODE_SIZE (mode);
2704 #ifdef PUSH_ROUNDING
2705 adjust = PUSH_ROUNDING (adjust);
2707 if (code == PRE_DEC || code == POST_DEC)
2710 /* Do not use anti_adjust_stack, since we don't want to update
2711 stack_pointer_delta. */
2712 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
2713 GEN_INT (adjust), stack_pointer_rtx,
2714 0, OPTAB_LIB_WIDEN);
2715 if (temp != stack_pointer_rtx)
2716 emit_move_insn (stack_pointer_rtx, temp);
2722 temp = stack_pointer_rtx;
2725 temp = plus_constant (stack_pointer_rtx, -GET_MODE_SIZE (mode));
2728 temp = plus_constant (stack_pointer_rtx, GET_MODE_SIZE (mode));
2734 return replace_equiv_address (x, temp);
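/* For example, assuming PUSH_ROUNDING leaves 4 unchanged, a
   (mem:SI (pre_dec sp)) push becomes an explicit sp = sp - 4
   followed by a plain store to (mem:SI sp). */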
2737 /* A subroutine of emit_move_complex. Generate a move from Y into X.
2738 X is known to satisfy push_operand, and MODE is known to be complex.
2739 Returns the last instruction emitted. */
2742 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
2744 enum machine_mode submode = GET_MODE_INNER (mode);
2747 #ifdef PUSH_ROUNDING
2748 unsigned int submodesize = GET_MODE_SIZE (submode);
2750 /* In case we output to the stack, but the size is smaller than the
2751 machine can push exactly, we need to use move instructions. */
2752 if (PUSH_ROUNDING (submodesize) != submodesize)
2754 x = emit_move_resolve_push (mode, x);
2755 return emit_move_insn (x, y);
2759 /* Note that the real part always precedes the imag part in memory
2760 regardless of the machine's endianness. */
2761 switch (GET_CODE (XEXP (x, 0)))
2775 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2776 read_complex_part (y, imag_first));
2777 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2778 read_complex_part (y, !imag_first));
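/* With a PRE_DEC push, for instance, the imaginary half is pushed
   first so that the real half ends up at the lower address, matching
   the memory layout noted above. */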
2781 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2782 MODE is known to be complex. Returns the last instruction emitted. */
2785 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
2789 /* Need to take special care for pushes, to maintain proper ordering
2790 of the data, and possibly extra padding. */
2791 if (push_operand (x, mode))
2792 return emit_move_complex_push (mode, x, y);
2794 /* For memory to memory moves, optimal behavior can be had with the
2795 existing block move logic. */
2796 if (MEM_P (x) && MEM_P (y))
2798 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
2799 BLOCK_OP_NO_LIBCALL);
2800 return get_last_insn ();
2803 /* See if we can coerce the target into moving both values at once. */
2805 /* Not possible if the values are inherently not adjacent. */
2806 if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
2808 /* Is possible if both are registers (or subregs of registers). */
2809 else if (register_operand (x, mode) && register_operand (y, mode))
2811 /* If one of the operands is a memory, and alignment constraints
2812 are friendly enough, we may be able to do combined memory operations.
2813 We do not attempt this if Y is a constant because that combination is
2814 usually better with the by-parts thing below. */
2815 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
2816 && (!STRICT_ALIGNMENT
2817 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
2824 rtx ret = emit_move_via_integer (mode, x, y);
2829 /* Show the output dies here. This is necessary for SUBREGs
2830 of pseudos since we cannot track their lifetimes correctly;
2831 hard regs shouldn't appear here except as return values. */
2832 if (!reload_completed && !reload_in_progress
2833 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
2834 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2836 write_complex_part (x, read_complex_part (y, false), false);
2837 write_complex_part (x, read_complex_part (y, true), true);
2838 return get_last_insn ();
2841 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2842 MODE is known to be MODE_CC. Returns the last instruction emitted. */
2845 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
2849 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
2852 enum insn_code code = mov_optab->handlers[CCmode].insn_code;
2853 if (code != CODE_FOR_nothing)
2854 return emit_move_via_alt_mode (CCmode, mode, code, x, y);
2857 /* Otherwise, find the MODE_INT mode of the same width. */
2858 ret = emit_move_via_integer (mode, x, y);
2859 gcc_assert (ret != NULL);
2863 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
2864 MODE is any multi-word or full-word mode that lacks a move_insn
2865 pattern. Note that you will get better code if you define such
2866 patterns, even if they must turn into multiple assembler instructions. */
2869 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
2876 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2878 /* If X is a push on the stack, do the push now and replace
2879 X with a reference to the stack pointer. */
2880 if (push_operand (x, mode))
2881 x = emit_move_resolve_push (mode, x);
2883 /* If we are in reload, see if either operand is a MEM whose address
2884 is scheduled for replacement. */
2885 if (reload_in_progress && MEM_P (x)
2886 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2887 x = replace_equiv_address_nv (x, inner);
2888 if (reload_in_progress && MEM_P (y)
2889 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2890 y = replace_equiv_address_nv (y, inner);
2894 need_clobber = false;
2896 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2899 rtx xpart = operand_subword (x, i, 1, mode);
2900 rtx ypart = operand_subword (y, i, 1, mode);
2902 /* If we can't get a part of Y, put Y into memory if it is a
2903 constant. Otherwise, force it into a register. If we still
2904 can't get a part of Y, abort. */
2905 if (ypart == 0 && CONSTANT_P (y))
2907 y = force_const_mem (mode, y);
2908 ypart = operand_subword (y, i, 1, mode);
2910 else if (ypart == 0)
2911 ypart = operand_subword_force (y, i, mode);
2913 gcc_assert (xpart && ypart);
2915 need_clobber |= (GET_CODE (xpart) == SUBREG);
2917 last_insn = emit_move_insn (xpart, ypart);
2923 /* Show the output dies here. This is necessary for SUBREGs
2924 of pseudos since we cannot track their lifetimes correctly;
2925 hard regs shouldn't appear here except as return values.
2926 We never want to emit such a clobber after reload. */
2928 && ! (reload_in_progress || reload_completed)
2929 && need_clobber != 0)
2930 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2937 /* Low level part of emit_move_insn.
2938 Called just like emit_move_insn, but assumes X and Y
2939 are basically valid. */
2942 emit_move_insn_1 (rtx x, rtx y)
2944 enum machine_mode mode = GET_MODE (x);
2945 enum insn_code code;
2947 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2949 code = mov_optab->handlers[mode].insn_code;
2950 if (code != CODE_FOR_nothing)
2951 return emit_insn (GEN_FCN (code) (x, y));
2953 /* Expand complex moves by moving real part and imag part. */
2954 if (COMPLEX_MODE_P (mode))
2955 return emit_move_complex (mode, x, y);
2957 if (GET_MODE_CLASS (mode) == MODE_CC)
2958 return emit_move_ccmode (mode, x, y);
2960 /* Try using a move pattern for the corresponding integer mode. This is
2961 only safe when simplify_subreg can convert MODE constants into integer
2962 constants. At present, it can only do this reliably if the value
2963 fits within a HOST_WIDE_INT. */
2964 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2966 rtx ret = emit_move_via_integer (mode, x, y);
2971 return emit_move_multi_word (mode, x, y);
2974 /* Generate code to copy Y into X.
2975 Both Y and X must have the same mode, except that
2976 Y can be a constant with VOIDmode.
2977 This mode cannot be BLKmode; use emit_block_move for that.
2979 Return the last instruction emitted. */
2982 emit_move_insn (rtx x, rtx y)
2984 enum machine_mode mode = GET_MODE (x);
2985 rtx y_cst = NULL_RTX;
2988 gcc_assert (mode != BLKmode
2989 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
2994 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2995 && (last_insn = compress_float_constant (x, y)))
3000 if (!LEGITIMATE_CONSTANT_P (y))
3002 y = force_const_mem (mode, y);
3004 /* If the target's cannot_force_const_mem prevented the spill,
3005 assume that the target's move expanders will also take care
3006 of the non-legitimate constant. */
3012 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3015 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3016 && ! push_operand (x, GET_MODE (x)))
3018 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3019 x = validize_mem (x);
3022 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3024 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3025 y = validize_mem (y);
3027 gcc_assert (mode != BLKmode);
3029 last_insn = emit_move_insn_1 (x, y);
3031 if (y_cst && REG_P (x)
3032 && (set = single_set (last_insn)) != NULL_RTX
3033 && SET_DEST (set) == x
3034 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3035 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3040 /* If Y is representable exactly in a narrower mode, and the target can
3041 perform the extension directly from constant or memory, then emit the
3042 move as an extension. */
3045 compress_float_constant (rtx x, rtx y)
3047 enum machine_mode dstmode = GET_MODE (x);
3048 enum machine_mode orig_srcmode = GET_MODE (y);
3049 enum machine_mode srcmode;
3052 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3054 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3055 srcmode != orig_srcmode;
3056 srcmode = GET_MODE_WIDER_MODE (srcmode))
3059 rtx trunc_y, last_insn;
3061 /* Skip if the target can't extend this way. */
3062 ic = can_extend_p (dstmode, srcmode, 0);
3063 if (ic == CODE_FOR_nothing)
3066 /* Skip if the narrowed value isn't exact. */
3067 if (! exact_real_truncate (srcmode, &r))
3070 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3072 if (LEGITIMATE_CONSTANT_P (trunc_y))
3074 /* Skip if the target needs extra instructions to perform the extension. */
3076 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3079 else if (float_extend_from_mem[dstmode][srcmode])
3080 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3084 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3085 last_insn = get_last_insn ();
3088 set_unique_reg_note (last_insn, REG_EQUAL, y);
3096 /* Pushing data onto the stack. */
3098 /* Push a block of length SIZE (perhaps variable)
3099 and return an rtx to address the beginning of the block.
3100 The value may be virtual_outgoing_args_rtx.
3102 EXTRA is the number of bytes of padding to push in addition to SIZE.
3103 BELOW nonzero means this padding comes at low addresses;
3104 otherwise, the padding comes at high addresses. */
3107 push_block (rtx size, int extra, int below)
3111 size = convert_modes (Pmode, ptr_mode, size, 1);
3112 if (CONSTANT_P (size))
3113 anti_adjust_stack (plus_constant (size, extra));
3114 else if (REG_P (size) && extra == 0)
3115 anti_adjust_stack (size);
3118 temp = copy_to_mode_reg (Pmode, size);
3120 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3121 temp, 0, OPTAB_LIB_WIDEN);
3122 anti_adjust_stack (temp);
3125 #ifndef STACK_GROWS_DOWNWARD
3131 temp = virtual_outgoing_args_rtx;
3132 if (extra != 0 && below)
3133 temp = plus_constant (temp, extra);
3137 if (GET_CODE (size) == CONST_INT)
3138 temp = plus_constant (virtual_outgoing_args_rtx,
3139 -INTVAL (size) - (below ? 0 : extra));
3140 else if (extra != 0 && !below)
3141 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3142 negate_rtx (Pmode, plus_constant (size, extra)));
3144 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3145 negate_rtx (Pmode, size));
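/* For example, with a constant SIZE of 16, EXTRA of 0 and a
   downward-growing stack, the block starts at
   virtual_outgoing_args_rtx - 16. */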
3148 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3151 #ifdef PUSH_ROUNDING
3153 /* Emit single push insn. */
3156 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3159 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3161 enum insn_code icode;
3162 insn_operand_predicate_fn pred;
3164 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3165 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3166 a MEM representing the push operation to the move expander. */
3167 icode = push_optab->handlers[(int) mode].insn_code;
3168 if (icode != CODE_FOR_nothing)
3170 if (((pred = insn_data[(int) icode].operand[0].predicate)
3171 && !((*pred) (x, mode))))
3172 x = force_reg (mode, x);
3173 emit_insn (GEN_FCN (icode) (x));
3176 if (GET_MODE_SIZE (mode) == rounded_size)
3177 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3178 /* If we are to pad downward, adjust the stack pointer first and
3179 then store X into the stack location using an offset. This is
3180 because emit_move_insn does not know how to pad; it does not have such an API. */
3182 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3184 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3185 HOST_WIDE_INT offset;
3187 emit_move_insn (stack_pointer_rtx,
3188 expand_binop (Pmode,
3189 #ifdef STACK_GROWS_DOWNWARD
3195 GEN_INT (rounded_size),
3196 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3198 offset = (HOST_WIDE_INT) padding_size;
3199 #ifdef STACK_GROWS_DOWNWARD
3200 if (STACK_PUSH_CODE == POST_DEC)
3201 /* We have already decremented the stack pointer, so get the previous value. */
3203 offset += (HOST_WIDE_INT) rounded_size;
3205 if (STACK_PUSH_CODE == POST_INC)
3206 /* We have already incremented the stack pointer, so get the previous value. */
3208 offset -= (HOST_WIDE_INT) rounded_size;
3210 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3214 #ifdef STACK_GROWS_DOWNWARD
3215 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3216 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3217 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3219 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3220 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3221 GEN_INT (rounded_size));
3223 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
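/* For example, pushing a 2-byte HImode value that PUSH_ROUNDING
   rounds up to 4, with upward padding on a downward-growing stack,
   yields the address (pre_modify sp (plus sp -4)). */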
3226 dest = gen_rtx_MEM (mode, dest_addr);
3230 set_mem_attributes (dest, type, 1);
3232 if (flag_optimize_sibling_calls)
3233 /* Function incoming arguments may overlap with sibling call
3234 outgoing arguments and we cannot allow reordering of reads
3235 from function arguments with stores to outgoing arguments
3236 of sibling calls. */
3237 set_mem_alias_set (dest, 0);
3239 emit_move_insn (dest, x);
3243 /* Generate code to push X onto the stack, assuming it has mode MODE and
3245 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3247 SIZE is an rtx for the size of data to be copied (in bytes),
3248 needed only if X is BLKmode.
3250 ALIGN (in bits) is maximum alignment we can assume.
3252 If PARTIAL and REG are both nonzero, then copy that many of the first
3253 words of X into registers starting with REG, and push the rest of X.
3254 The amount of space pushed is decreased by PARTIAL words,
3255 rounded *down* to a multiple of PARM_BOUNDARY.
3256 REG must be a hard register in this case.
3257 If REG is zero but PARTIAL is not, take all other actions for an
3258 argument partially in registers, but do not actually load any registers.
3261 EXTRA is the amount in bytes of extra space to leave next to this arg.
3262 This is ignored if an argument block has already been allocated.
3264 On a machine that lacks real push insns, ARGS_ADDR is the address of
3265 the bottom of the argument block for this call. We use indexing off there
3266 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3267 argument block has not been preallocated.
3269 ARGS_SO_FAR is the size of args previously pushed for this call.
3271 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3272 for arguments passed in registers. If nonzero, it will be the number
3273 of bytes required. */
3276 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3277 unsigned int align, int partial, rtx reg, int extra,
3278 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3282 enum direction stack_direction
3283 #ifdef STACK_GROWS_DOWNWARD
3289 /* Decide where to pad the argument: `downward' for below,
3290 `upward' for above, or `none' for don't pad it.
3291 Default is below for small data on big-endian machines; else above. */
3292 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3294 /* Invert direction if stack is post-decrement.
3296 if (STACK_PUSH_CODE == POST_DEC)
3297 if (where_pad != none)
3298 where_pad = (where_pad == downward ? upward : downward);
3302 if (mode == BLKmode)
3304 /* Copy a block into the stack, entirely or partially. */
3307 int used = partial * UNITS_PER_WORD;
3311 if (reg && GET_CODE (reg) == PARALLEL)
3313 /* Use the size of the elt to compute offset. */
3314 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3315 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3316 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3319 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3325 /* USED is now the # of bytes we need not copy to the stack
3326 because registers will take care of them. */
3329 xinner = adjust_address (xinner, BLKmode, used);
3331 /* If the partial register-part of the arg counts in its stack size,
3332 skip the part of stack space corresponding to the registers.
3333 Otherwise, start copying to the beginning of the stack space,
3334 by setting SKIP to 0. */
3335 skip = (reg_parm_stack_space == 0) ? 0 : used;
3337 #ifdef PUSH_ROUNDING
3338 /* Do it with several push insns if that doesn't take lots of insns
3339 and if there is no difficulty with push insns that skip bytes
3340 on the stack for alignment purposes. */
3343 && GET_CODE (size) == CONST_INT
3345 && MEM_ALIGN (xinner) >= align
3346 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3347 /* Here we avoid the case of a structure whose weak alignment
3348 forces many pushes of a small amount of data,
3349 and such small pushes do rounding that causes trouble. */
3350 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3351 || align >= BIGGEST_ALIGNMENT
3352 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3353 == (align / BITS_PER_UNIT)))
3354 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3356 /* Push padding now if padding above and stack grows down,
3357 or if padding below and stack grows up.
3358 But if space already allocated, this has already been done. */
3359 if (extra && args_addr == 0
3360 && where_pad != none && where_pad != stack_direction)
3361 anti_adjust_stack (GEN_INT (extra));
3363 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3366 #endif /* PUSH_ROUNDING */
3370 /* Otherwise make space on the stack and copy the data
3371 to the address of that space. */
3373 /* Deduct words put into registers from the size we must copy. */
3376 if (GET_CODE (size) == CONST_INT)
3377 size = GEN_INT (INTVAL (size) - used);
3379 size = expand_binop (GET_MODE (size), sub_optab, size,
3380 GEN_INT (used), NULL_RTX, 0,
3384 /* Get the address of the stack space.
3385 In this case, we do not deal with EXTRA separately.
3386 A single stack adjust will do. */
3389 temp = push_block (size, extra, where_pad == downward);
3392 else if (GET_CODE (args_so_far) == CONST_INT)
3393 temp = memory_address (BLKmode,
3394 plus_constant (args_addr,
3395 skip + INTVAL (args_so_far)));
3397 temp = memory_address (BLKmode,
3398 plus_constant (gen_rtx_PLUS (Pmode,
3403 if (!ACCUMULATE_OUTGOING_ARGS)
3405 /* If the source is referenced relative to the stack pointer,
3406 copy it to another register to stabilize it. We do not need
3407 to do this if we know that we won't be changing sp. */
3409 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3410 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3411 temp = copy_to_reg (temp);
3414 target = gen_rtx_MEM (BLKmode, temp);
3416 /* We do *not* set_mem_attributes here, because incoming arguments
3417 may overlap with sibling call outgoing arguments and we cannot
3418 allow reordering of reads from function arguments with stores
3419 to outgoing arguments of sibling calls. We do, however, want
3420 to record the alignment of the stack slot. */
3421 /* ALIGN may well be better aligned than TYPE, e.g. due to
3422 PARM_BOUNDARY. Assume the caller isn't lying. */
3423 set_mem_align (target, align);
3425 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3428 else if (partial > 0)
3430 /* Scalar partly in registers. */
3432 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3435 /* # words of start of argument
3436 that we must make space for but need not store. */
3437 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3438 int args_offset = INTVAL (args_so_far);
3441 /* Push padding now if padding above and stack grows down,
3442 or if padding below and stack grows up.
3443 But if space already allocated, this has already been done. */
3444 if (extra && args_addr == 0
3445 && where_pad != none && where_pad != stack_direction)
3446 anti_adjust_stack (GEN_INT (extra));
3448 /* If we make space by pushing it, we might as well push
3449 the real data. Otherwise, we can leave OFFSET nonzero
3450 and leave the space uninitialized. */
3454 /* Now NOT_STACK gets the number of words that we don't need to
3455 allocate on the stack. */
3456 not_stack = partial - offset;
3458 /* If the partial register-part of the arg counts in its stack size,
3459 skip the part of stack space corresponding to the registers.
3460 Otherwise, start copying to the beginning of the stack space,
3461 by setting SKIP to 0. */
3462 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3464 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3465 x = validize_mem (force_const_mem (mode, x));
3467 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3468 SUBREGs of such registers are not allowed. */
3469 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3470 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3471 x = copy_to_reg (x);
3473 /* Loop over all the words allocated on the stack for this arg. */
3474 /* We can do it by words, because any scalar bigger than a word
3475 has a size that is a multiple of a word. */
3476 #ifndef PUSH_ARGS_REVERSED
3477 for (i = not_stack; i < size; i++)
3479 for (i = size - 1; i >= not_stack; i--)
3481 if (i >= not_stack + offset)
3482 emit_push_insn (operand_subword_force (x, i, mode),
3483 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3485 GEN_INT (args_offset + ((i - not_stack + skip)
3487 reg_parm_stack_space, alignment_pad);
3494 /* Push padding now if padding above and stack grows down,
3495 or if padding below and stack grows up.
3496 But if space already allocated, this has already been done. */
3497 if (extra && args_addr == 0
3498 && where_pad != none && where_pad != stack_direction)
3499 anti_adjust_stack (GEN_INT (extra));
3501 #ifdef PUSH_ROUNDING
3502 if (args_addr == 0 && PUSH_ARGS)
3503 emit_single_push_insn (mode, x, type);
3507 if (GET_CODE (args_so_far) == CONST_INT)
3509 = memory_address (mode,
3510 plus_constant (args_addr,
3511 INTVAL (args_so_far)));
3513 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3515 dest = gen_rtx_MEM (mode, addr);
3517 /* We do *not* set_mem_attributes here, because incoming arguments
3518 may overlap with sibling call outgoing arguments and we cannot
3519 allow reordering of reads from function arguments with stores
3520 to outgoing arguments of sibling calls. We do, however, want
3521 to record the alignment of the stack slot. */
3522 /* ALIGN may well be better aligned than TYPE, e.g. due to
3523 PARM_BOUNDARY. Assume the caller isn't lying. */
3524 set_mem_align (dest, align);
3526 emit_move_insn (dest, x);
3530 /* If part should go in registers, copy that part
3531 into the appropriate registers. Do this now, at the end,
3532 since mem-to-mem copies above may do function calls. */
3533 if (partial > 0 && reg != 0)
3535 /* Handle calls that pass values in multiple non-contiguous locations.
3536 The Irix 6 ABI has examples of this. */
3537 if (GET_CODE (reg) == PARALLEL)
3538 emit_group_load (reg, x, type, -1);
3540 move_block_to_reg (REGNO (reg), x, partial, mode);
3543 if (extra && args_addr == 0 && where_pad == stack_direction)
3544 anti_adjust_stack (GEN_INT (extra));
3546 if (alignment_pad && args_addr == 0)
3547 anti_adjust_stack (alignment_pad);
3550 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3554 get_subtarget (rtx x)
3558 /* Only registers can be subtargets. */
3560 /* Don't use hard regs to avoid extending their life. */
3561 || REGNO (x) < FIRST_PSEUDO_REGISTER
3565 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3566 FIELD is a bitfield. Returns true if the optimization was successful,
3567 and there's nothing else to do. */
3570 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3571 unsigned HOST_WIDE_INT bitpos,
3572 enum machine_mode mode1, rtx str_rtx,
3575 enum machine_mode str_mode = GET_MODE (str_rtx);
3576 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3581 if (mode1 != VOIDmode
3582 || bitsize >= BITS_PER_WORD
3583 || str_bitsize > BITS_PER_WORD
3584 || TREE_SIDE_EFFECTS (to)
3585 || TREE_THIS_VOLATILE (to))
3589 if (!BINARY_CLASS_P (src)
3590 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3593 op0 = TREE_OPERAND (src, 0);
3594 op1 = TREE_OPERAND (src, 1);
3597 if (!operand_equal_p (to, op0, 0))
3600 if (MEM_P (str_rtx))
3602 unsigned HOST_WIDE_INT offset1;
3604 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3605 str_mode = word_mode;
3606 str_mode = get_best_mode (bitsize, bitpos,
3607 MEM_ALIGN (str_rtx), str_mode, 0);
3608 if (str_mode == VOIDmode)
3610 str_bitsize = GET_MODE_BITSIZE (str_mode);
3613 bitpos %= str_bitsize;
3614 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3615 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3617 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3620 /* If the bit field covers the whole REG/MEM, store_field
3621 will likely generate better code. */
3622 if (bitsize >= str_bitsize)
3625 /* We can't handle fields split across multiple entities. */
3626 if (bitpos + bitsize > str_bitsize)
3629 if (BYTES_BIG_ENDIAN)
3630 bitpos = str_bitsize - bitpos - bitsize;
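/* For example, with str_bitsize == 32, bitpos == 8 and bitsize == 8,
   big-endian numbering turns bitpos into 32 - 8 - 8 == 16. */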
3632 switch (TREE_CODE (src))
3636 /* For now, just optimize the case of the topmost bitfield
3637 where we don't need to do any masking and also
3638 1 bit bitfields where xor can be used.
3639 We might win by one instruction for the other bitfields
3640 too if insv/extv instructions aren't used, so that
3641 can be added later. */
3642 if (bitpos + bitsize != str_bitsize
3643 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3646 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3647 value = convert_modes (str_mode,
3648 TYPE_MODE (TREE_TYPE (op1)), value,
3649 TYPE_UNSIGNED (TREE_TYPE (op1)));
3651 /* We may be accessing data outside the field, which means
3652 we can alias adjacent data. */
3653 if (MEM_P (str_rtx))
3655 str_rtx = shallow_copy_rtx (str_rtx);
3656 set_mem_alias_set (str_rtx, 0);
3657 set_mem_expr (str_rtx, 0);
3660 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3661 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3663 value = expand_and (str_mode, value, const1_rtx, NULL);
3666 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3667 build_int_cst (NULL_TREE, bitpos),
3669 result = expand_binop (str_mode, binop, str_rtx,
3670 value, str_rtx, 1, OPTAB_WIDEN);
3671 if (result != str_rtx)
3672 emit_move_insn (str_rtx, result);
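/* For instance, a source-level "s.f += 1" where F is the topmost
   bitfield in its word compiles to a single add on the containing
   word: bits that carry out of the field fall off the top, so no
   masking is needed. */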
3683 /* Expand an assignment that stores the value of FROM into TO. */
3686 expand_assignment (tree to, tree from)
3691 /* Don't crash if the lhs of the assignment was erroneous. */
3693 if (TREE_CODE (to) == ERROR_MARK)
3695 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3699 /* Assignment of a structure component needs special treatment
3700 if the structure component's rtx is not simply a MEM.
3701 Assignment of an array element at a constant index, and assignment of
3702 an array element in an unaligned packed structure field, have the same problem. */
3704 if (handled_component_p (to)
3705 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3707 enum machine_mode mode1;
3708 HOST_WIDE_INT bitsize, bitpos;
3716 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3717 &unsignedp, &volatilep);
3719 /* If we are going to use store_bit_field and extract_bit_field,
3720 make sure to_rtx will be safe for multiple use. */
3722 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3726 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3728 gcc_assert (MEM_P (to_rtx));
3730 #ifdef POINTERS_EXTEND_UNSIGNED
3731 if (GET_MODE (offset_rtx) != Pmode)
3732 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3734 if (GET_MODE (offset_rtx) != ptr_mode)
3735 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3738 /* A constant address in TO_RTX can have VOIDmode; we must not try
3739 to call force_reg for that case. Avoid that case. */
3741 && GET_MODE (to_rtx) == BLKmode
3742 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3744 && (bitpos % bitsize) == 0
3745 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3746 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3748 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3752 to_rtx = offset_address (to_rtx, offset_rtx,
3753 highest_pow2_factor_for_target (to,
3757 /* Handle expand_expr of a complex value returning a CONCAT. */
3758 if (GET_CODE (to_rtx) == CONCAT)
3760 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
3762 gcc_assert (bitpos == 0);
3763 result = store_expr (from, to_rtx, false);
3767 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3768 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3775 /* If the field is at offset zero, we could have been given the
3776 DECL_RTX of the parent struct. Don't munge it. */
3777 to_rtx = shallow_copy_rtx (to_rtx);
3779 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3781 /* Deal with volatile and readonly fields. The former is only
3782 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3784 MEM_VOLATILE_P (to_rtx) = 1;
3786 if (!can_address_p (to))
3787 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3790 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3794 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3795 TREE_TYPE (tem), get_alias_set (to));
3799 preserve_temp_slots (result);
3805 /* If the rhs is a function call and its value is not an aggregate,
3806 call the function before we start to compute the lhs.
3807 This is needed for correct code for cases such as
3808 val = setjmp (buf) on machines where reference to val
3809 requires loading up part of an address in a separate insn.
3811 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3812 since it might be a promoted variable where the zero- or sign-extension
3813 needs to be done. Handling this in the normal way is safe because no
3814 computation is done before the call. */
3815 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3816 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3817 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3818 && REG_P (DECL_RTL (to))))
3823 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3825 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3827 /* Handle calls that return values in multiple non-contiguous locations.
3828 The Irix 6 ABI has examples of this. */
3829 if (GET_CODE (to_rtx) == PARALLEL)
3830 emit_group_load (to_rtx, value, TREE_TYPE (from),
3831 int_size_in_bytes (TREE_TYPE (from)));
3832 else if (GET_MODE (to_rtx) == BLKmode)
3833 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3836 if (POINTER_TYPE_P (TREE_TYPE (to)))
3837 value = convert_memory_address (GET_MODE (to_rtx), value);
3838 emit_move_insn (to_rtx, value);
3840 preserve_temp_slots (to_rtx);
3846 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3847 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3850 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3852 /* Don't move directly into a return register. */
3853 if (TREE_CODE (to) == RESULT_DECL
3854 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3859 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3861 if (GET_CODE (to_rtx) == PARALLEL)
3862 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3863 int_size_in_bytes (TREE_TYPE (from)));
3865 emit_move_insn (to_rtx, temp);
3867 preserve_temp_slots (to_rtx);
3873 /* In case we are returning the contents of an object which overlaps
3874 the place the value is being stored, use a safe function when copying
3875 a value through a pointer into a structure value return block. */
3876 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3877 && current_function_returns_struct
3878 && !current_function_returns_pcc_struct)
3883 size = expr_size (from);
3884 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3886 emit_library_call (memmove_libfunc, LCT_NORMAL,
3887 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3888 XEXP (from_rtx, 0), Pmode,
3889 convert_to_mode (TYPE_MODE (sizetype),
3890 size, TYPE_UNSIGNED (sizetype)),
3891 TYPE_MODE (sizetype));
3893 preserve_temp_slots (to_rtx);
3899 /* Compute FROM and store the value in the rtx we got. */
3902 result = store_expr (from, to_rtx, 0);
3903 preserve_temp_slots (result);
3909 /* Generate code for computing expression EXP,
3910 and storing the value into TARGET.
3912 If the mode is BLKmode then we may return TARGET itself.
3913 It turns out that in BLKmode it doesn't cause a problem,
3914 because C has no operators that could combine two different
3915 assignments into the same BLKmode object with different values
3916 with no sequence point. Will other languages need this to be more thorough?
3919 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3920 stack, and block moves may need to be treated specially. */
3923 store_expr (tree exp, rtx target, int call_param_p)
3926 rtx alt_rtl = NULL_RTX;
3927 int dont_return_target = 0;
3929 if (VOID_TYPE_P (TREE_TYPE (exp)))
3931 /* C++ can generate ?: expressions with a throw expression in one
3932 branch and an rvalue in the other. Here, we resolve attempts to
3933 store the throw expression's nonexistent result. */
3934 gcc_assert (!call_param_p);
3935 expand_expr (exp, const0_rtx, VOIDmode, 0);
3938 if (TREE_CODE (exp) == COMPOUND_EXPR)
3940 /* Perform first part of compound expression, then assign from second
3942 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3943 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3944 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3946 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3948 /* For conditional expression, get safe form of the target. Then
3949 test the condition, doing the appropriate assignment on either
3950 side. This avoids the creation of unnecessary temporaries.
3951 For non-BLKmode, it is more efficient not to do this. */
3953 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3955 do_pending_stack_adjust ();
3957 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3958 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3959 emit_jump_insn (gen_jump (lab2));
3962 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
3968 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3969 /* If this is a scalar in a register that is stored in a wider mode
3970 than the declared mode, compute the result into its declared mode
3971 and then convert to the wider mode. Our value is the computed expression. */
3974 rtx inner_target = 0;
3976 /* We can do the conversion inside EXP, which will often result
3977 in some optimizations. Do the conversion in two steps: first
3978 change the signedness, if needed, then the extension. But don't
3979 do this if the type of EXP is a subtype of something else
3980 since then the conversion might involve more than just
3981 converting modes. */
3982 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
3983 && TREE_TYPE (TREE_TYPE (exp)) == 0
3984 && (!lang_hooks.reduce_bit_field_operations
3985 || (GET_MODE_PRECISION (GET_MODE (target))
3986 == TYPE_PRECISION (TREE_TYPE (exp)))))
3988 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3989 != SUBREG_PROMOTED_UNSIGNED_P (target))
3991 (lang_hooks.types.signed_or_unsigned_type
3992 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3994 exp = convert (lang_hooks.types.type_for_mode
3995 (GET_MODE (SUBREG_REG (target)),
3996 SUBREG_PROMOTED_UNSIGNED_P (target)),
3999 inner_target = SUBREG_REG (target);
4002 temp = expand_expr (exp, inner_target, VOIDmode,
4003 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4005 /* If TEMP is a VOIDmode constant, use convert_modes to make
4006 sure that we properly convert it. */
4007 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4009 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4010 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4011 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4012 GET_MODE (target), temp,
4013 SUBREG_PROMOTED_UNSIGNED_P (target));
4016 convert_move (SUBREG_REG (target), temp,
4017 SUBREG_PROMOTED_UNSIGNED_P (target));
4023 temp = expand_expr_real (exp, target, GET_MODE (target),
4025 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4027 /* Return TARGET if it's a specified hardware register.
4028 If TARGET is a volatile mem ref, either return TARGET
4029 or return a reg copied *from* TARGET; ANSI requires this.
4031 Otherwise, if TEMP is not TARGET, return TEMP
4032 if it is constant (for efficiency),
4033 or if we really want the correct value. */
4034 if (!(target && REG_P (target)
4035 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4036 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4037 && ! rtx_equal_p (temp, target)
4038 && CONSTANT_P (temp))
4039 dont_return_target = 1;
4042 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4043 the same as that of TARGET, adjust the constant. This is needed, for
4044 example, in case it is a CONST_DOUBLE and we want only a word-sized value.
4046 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4047 && TREE_CODE (exp) != ERROR_MARK
4048 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4049 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4050 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4052 /* If value was not generated in the target, store it there.
4053 Convert the value to TARGET's type first if necessary and emit the
4054 pending incrementations that have been queued when expanding EXP.
4055 Note that we cannot emit the whole queue blindly because this will
4056 effectively disable the POST_INC optimization later.
4058 If TEMP and TARGET compare equal according to rtx_equal_p, but
4059 one or both of them are volatile memory refs, we have to distinguish
4061 - expand_expr has used TARGET. In this case, we must not generate
4062 another copy. This can be detected by TARGET being equal according
4064 - expand_expr has not used TARGET - that means that the source just
4065 happens to have the same RTX form. Since temp will have been created
4066 by expand_expr, it will compare unequal according to ==.
4067 We must generate a copy in this case, to reach the correct number
4068 of volatile memory references. */
4070 if ((! rtx_equal_p (temp, target)
4071 || (temp != target && (side_effects_p (temp)
4072 || side_effects_p (target))))
4073 && TREE_CODE (exp) != ERROR_MARK
4074 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4075 but TARGET is not a valid memory reference, TEMP will differ
4076 from TARGET although it is really the same location. */
4077 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4078 /* If there's nothing to copy, don't bother. Don't call expr_size
4079 unless necessary, because some front ends' (C++) expr_size hook
4080 aborts on objects that are not supposed to be bit-copied or bit-initialized. */
4082 && expr_size (exp) != const0_rtx)
4084 if (GET_MODE (temp) != GET_MODE (target)
4085 && GET_MODE (temp) != VOIDmode)
4087 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4088 if (dont_return_target)
4090 /* In this case, we will return TEMP,
4091 so make sure it has the proper mode.
4092 But don't forget to store the value into TARGET. */
4093 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4094 emit_move_insn (target, temp);
4097 convert_move (target, temp, unsignedp);
4100 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4102 /* Handle copying a string constant into an array. The string
4103 constant may be shorter than the array. So copy just the string's
4104 actual length, and clear the rest. First get the size of the data
4105 type of the string, which is actually the size of the target. */
4106 rtx size = expr_size (exp);
4108 if (GET_CODE (size) == CONST_INT
4109 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4110 emit_block_move (target, temp, size,
4111 (call_param_p
4112 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4115 /* Compute the size of the data to copy from the string. */
4116 tree copy_size
4117 = size_binop (MIN_EXPR,
4118 make_tree (sizetype, size),
4119 size_int (TREE_STRING_LENGTH (exp)));
4120 rtx copy_size_rtx
4121 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4122 (call_param_p
4123 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4126 /* Copy that much. */
4127 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4128 TYPE_UNSIGNED (sizetype));
4129 emit_block_move (target, temp, copy_size_rtx,
4130 (call_param_p
4131 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4133 /* Figure out how much is left in TARGET that we have to clear.
4134 Do all calculations in ptr_mode. */
4135 if (GET_CODE (copy_size_rtx) == CONST_INT)
4137 size = plus_constant (size, -INTVAL (copy_size_rtx));
4138 target = adjust_address (target, BLKmode,
4139 INTVAL (copy_size_rtx));
4141 else
4142 {
4143 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4144 copy_size_rtx, NULL_RTX, 0,
4145 OPTAB_LIB_WIDEN);
4147 #ifdef POINTERS_EXTEND_UNSIGNED
4148 if (GET_MODE (copy_size_rtx) != Pmode)
4149 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4150 TYPE_UNSIGNED (sizetype));
4153 target = offset_address (target, copy_size_rtx,
4154 highest_pow2_factor (copy_size));
4155 label = gen_label_rtx ();
4156 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4157 GET_MODE (size), 0, label);
4160 if (size != const0_rtx)
4161 clear_storage (target, size);
4167 /* Handle calls that return values in multiple non-contiguous locations.
4168 The Irix 6 ABI has examples of this. */
4169 else if (GET_CODE (target) == PARALLEL)
4170 emit_group_load (target, temp, TREE_TYPE (exp),
4171 int_size_in_bytes (TREE_TYPE (exp)));
4172 else if (GET_MODE (temp) == BLKmode)
4173 emit_block_move (target, temp, expr_size (exp),
4174 (call_param_p
4175 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4176 else
4177 {
4178 temp = force_operand (temp, target);
4179 if (temp != target)
4180 emit_move_insn (target, temp);
4181 }
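/* An illustrative sketch, not part of GCC itself: the STRING_CST branch
above implements the C semantics below, where only the string's bytes
are block-moved and the rest of the array is cleared. The function
name is hypothetical. */
static void
store_expr_string_sketch (void)
{
  /* "abc" supplies 4 bytes (including the NUL); the remaining 4 bytes
     of BUF are zeroed, matching the emit_block_move/clear_storage
     pair emitted above. */
  char buf[8] = "abc";
  (void) buf;
}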
4187 /* Examine CTOR. Count how many scalar fields are set to nonzero
4188 values and place that count in *P_NZ_ELTS. Count how many scalar
4189 fields are set to non-constant values and place that count in *P_NC_ELTS. */
4192 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4193 HOST_WIDE_INT *p_nc_elts)
4195 HOST_WIDE_INT nz_elts, nc_elts;
4196 tree list;
4198 nz_elts = 0;
4199 nc_elts = 0;
4201 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4203 tree value = TREE_VALUE (list);
4204 tree purpose = TREE_PURPOSE (list);
4205 HOST_WIDE_INT mult;
4207 mult = 1;
4208 if (TREE_CODE (purpose) == RANGE_EXPR)
4210 tree lo_index = TREE_OPERAND (purpose, 0);
4211 tree hi_index = TREE_OPERAND (purpose, 1);
4213 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4214 mult = (tree_low_cst (hi_index, 1)
4215 - tree_low_cst (lo_index, 1) + 1);
4218 switch (TREE_CODE (value))
4219 {
4220 case CONSTRUCTOR:
4221 {
4222 HOST_WIDE_INT nz = 0, nc = 0;
4223 categorize_ctor_elements_1 (value, &nz, &nc);
4224 nz_elts += mult * nz;
4225 nc_elts += mult * nc;
4231 if (!initializer_zerop (value))
4236 nz_elts += mult * TREE_STRING_LENGTH (value);
4240 if (!initializer_zerop (TREE_REALPART (value)))
4242 if (!initializer_zerop (TREE_IMAGPART (value)))
4249 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4250 if (!initializer_zerop (TREE_VALUE (v)))
4257 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4263 *p_nz_elts += nz_elts;
4264 *p_nc_elts += nc_elts;
4268 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4269 HOST_WIDE_INT *p_nc_elts)
4270 {
4271 *p_nz_elts = 0;
4272 *p_nc_elts = 0;
4273 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4274 }
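/* An illustrative sketch, not part of GCC: the kind of initializer
categorize_ctor_elements analyzes. For the CONSTRUCTOR built from the
braces below, nz_elts would be 3 (the elements 1, 2 and the call) and
nc_elts would be 1 (the non-constant call). All names here are
hypothetical. */
static int categorize_sketch_f (void) { return 42; }
static void
categorize_sketch (void)
{
  struct { int a, b, c, d; } s = { 1, 0, 2, categorize_sketch_f () };
  (void) s;
}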
4276 /* Count the number of scalars in TYPE. Return -1 on overflow or
4277 variable-size arrays. */
4280 count_type_elements (tree type)
4282 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4283 switch (TREE_CODE (type))
4284 {
4285 case ARRAY_TYPE:
4286 {
4287 tree telts = array_type_nelts (type);
4288 if (telts && host_integerp (telts, 1))
4290 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4291 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4292 if (n == 0)
4293 return 0;
4294 else if (max / n > m)
4295 return n * m;
4302 HOST_WIDE_INT n = 0, t;
4303 tree f;
4305 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4306 if (TREE_CODE (f) == FIELD_DECL)
4308 t = count_type_elements (TREE_TYPE (f));
4309 if (t < 0)
4310 return -1;
4311 n += t;
4313 return n;
4314 }
4316 case UNION_TYPE:
4318 case QUAL_UNION_TYPE:
4320 /* Ho hum. How in the world do we guess here? Clearly it isn't
4321 right to count the fields. Guess based on the number of words. */
4322 HOST_WIDE_INT n = int_size_in_bytes (type);
4323 if (n < 0)
4324 return -1;
4325 return n / UNITS_PER_WORD;
4331 case VECTOR_TYPE:
4332 return TYPE_VECTOR_SUBPARTS (type);
4341 case REFERENCE_TYPE:
4342 return 1;
4355 /* Return 1 if EXP contains mostly (3/4) zeros. */
4358 mostly_zeros_p (tree exp)
4360 if (TREE_CODE (exp) == CONSTRUCTOR)
4363 HOST_WIDE_INT nz_elts, nc_elts, elts;
4365 /* If there are no ranges of true bits, it is all zero. */
4366 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4367 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4369 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4370 elts = count_type_elements (TREE_TYPE (exp));
4372 return nz_elts < elts / 4;
4375 return initializer_zerop (exp);
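/* An illustrative sketch, not part of GCC: an initializer that
mostly_zeros_p accepts. Of the 128 elements only two are nonzero, so
nz_elts (2) is below elts / 4 (32), and store_constructor will clear
the whole object first and then store just the nonzero entries.
(Designated initializers as below are C99/GNU C; the name is
hypothetical.) */
static void
mostly_zeros_sketch (void)
{
  int a[128] = { [0] = 1, [100] = 2 };
  (void) a;
}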
4378 /* Helper function for store_constructor.
4379 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4380 TYPE is the type of the CONSTRUCTOR, not the element type.
4381 CLEARED is as for store_constructor.
4382 ALIAS_SET is the alias set to use for any stores.
4384 This provides a recursive shortcut back to store_constructor when it isn't
4385 necessary to go through store_field. This is so that we can pass through
4386 the cleared field to let store_constructor know that we may not have to
4387 clear a substructure if the outer structure has already been cleared. */
4390 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4391 HOST_WIDE_INT bitpos, enum machine_mode mode,
4392 tree exp, tree type, int cleared, int alias_set)
4394 if (TREE_CODE (exp) == CONSTRUCTOR
4395 /* We can only call store_constructor recursively if the size and
4396 bit position are on a byte boundary. */
4397 && bitpos % BITS_PER_UNIT == 0
4398 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4399 /* If we have a nonzero bitpos for a register target, then we just
4400 let store_field do the bitfield handling. This is unlikely to
4401 generate unnecessary clear instructions anyways. */
4402 && (bitpos == 0 || MEM_P (target)))
4404 if (MEM_P (target))
4405 target
4406 = adjust_address (target,
4407 GET_MODE (target) == BLKmode
4408 || 0 != (bitpos
4409 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4410 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4413 /* Update the alias set, if required. */
4414 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4415 && MEM_ALIAS_SET (target) != 0)
4417 target = copy_rtx (target);
4418 set_mem_alias_set (target, alias_set);
4421 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4423 else
4424 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4427 /* Store the value of constructor EXP into the rtx TARGET.
4428 TARGET is either a REG or a MEM; we know it cannot conflict, since
4429 safe_from_p has been called.
4430 CLEARED is true if TARGET is known to have been zero'd.
4431 SIZE is the number of bytes of TARGET we are allowed to modify: this
4432 may not be the same as the size of EXP if we are assigning to a field
4433 which has been packed to exclude padding bits. */
4436 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4438 tree type = TREE_TYPE (exp);
4439 #ifdef WORD_REGISTER_OPERATIONS
4440 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4443 switch (TREE_CODE (type))
4447 case QUAL_UNION_TYPE:
4451 /* If size is zero or the target is already cleared, do nothing. */
4452 if (size == 0 || cleared)
4454 /* We either clear the aggregate or indicate the value is dead. */
4455 else if ((TREE_CODE (type) == UNION_TYPE
4456 || TREE_CODE (type) == QUAL_UNION_TYPE)
4457 && ! CONSTRUCTOR_ELTS (exp))
4458 /* If the constructor is empty, clear the union. */
4460 clear_storage (target, expr_size (exp));
4464 /* If we are building a static constructor into a register,
4465 set the initial value as zero so we can fold the value into
4466 a constant. But if more than one register is involved,
4467 this probably loses. */
4468 else if (REG_P (target) && TREE_STATIC (exp)
4469 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4471 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4475 /* If the constructor has fewer fields than the structure or
4476 if we are initializing the structure to mostly zeros, clear
4477 the whole structure first. Don't do this if TARGET is a
4478 register whose mode size isn't equal to SIZE since
4479 clear_storage can't handle this case. */
4481 && ((list_length (CONSTRUCTOR_ELTS (exp))
4482 != fields_length (type))
4483 || mostly_zeros_p (exp))
4485 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4488 clear_storage (target, GEN_INT (size));
4493 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4495 /* Store each element of the constructor into the
4496 corresponding field of TARGET. */
4498 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4500 tree field = TREE_PURPOSE (elt);
4501 tree value = TREE_VALUE (elt);
4502 enum machine_mode mode;
4503 HOST_WIDE_INT bitsize;
4504 HOST_WIDE_INT bitpos = 0;
4506 rtx to_rtx = target;
4508 /* Just ignore missing fields. We cleared the whole
4509 structure, above, if any fields are missing. */
4513 if (cleared && initializer_zerop (value))
4514 continue;
4516 if (host_integerp (DECL_SIZE (field), 1))
4517 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4518 else
4519 bitsize = -1;
4521 mode = DECL_MODE (field);
4522 if (DECL_BIT_FIELD (field))
4523 mode = VOIDmode;
4525 offset = DECL_FIELD_OFFSET (field);
4526 if (host_integerp (offset, 0)
4527 && host_integerp (bit_position (field), 0))
4528 {
4529 bitpos = int_bit_position (field);
4530 offset = 0;
4531 }
4532 else
4533 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4540 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4541 make_tree (TREE_TYPE (exp),
4542 target));
4544 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4545 gcc_assert (MEM_P (to_rtx));
4547 #ifdef POINTERS_EXTEND_UNSIGNED
4548 if (GET_MODE (offset_rtx) != Pmode)
4549 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4551 if (GET_MODE (offset_rtx) != ptr_mode)
4552 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4555 to_rtx = offset_address (to_rtx, offset_rtx,
4556 highest_pow2_factor (offset));
4559 #ifdef WORD_REGISTER_OPERATIONS
4560 /* If this initializes a field that is smaller than a
4561 word, at the start of a word, try to widen it to a full
4562 word. This special case allows us to output C++ member
4563 function initializations in a form that the optimizers can understand. */
4565 if (REG_P (target)
4566 && bitsize < BITS_PER_WORD
4567 && bitpos % BITS_PER_WORD == 0
4568 && GET_MODE_CLASS (mode) == MODE_INT
4569 && TREE_CODE (value) == INTEGER_CST
4570 && exp_size >= 0
4571 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4573 tree type = TREE_TYPE (value);
4575 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4577 type = lang_hooks.types.type_for_size
4578 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4579 value = convert (type, value);
4582 if (BYTES_BIG_ENDIAN)
4583 value
4584 = fold (build2 (LSHIFT_EXPR, type, value,
4585 build_int_cst (NULL_TREE,
4586 BITS_PER_WORD - bitsize)));
4587 bitsize = BITS_PER_WORD;
4592 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4593 && DECL_NONADDRESSABLE_P (field))
4595 to_rtx = copy_rtx (to_rtx);
4596 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4599 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4600 value, type, cleared,
4601 get_alias_set (TREE_TYPE (field)));
4611 tree elttype = TREE_TYPE (type);
4613 HOST_WIDE_INT minelt = 0;
4614 HOST_WIDE_INT maxelt = 0;
4616 domain = TYPE_DOMAIN (type);
4617 const_bounds_p = (TYPE_MIN_VALUE (domain)
4618 && TYPE_MAX_VALUE (domain)
4619 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4620 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4622 /* If we have constant bounds for the range of the type, get them. */
4625 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4626 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4629 /* If the constructor has fewer elements than the array, clear
4630 the whole array first. Similarly if this is a static
4631 constructor of a non-BLKmode object. */
4632 if (cleared)
4633 need_to_clear = 0;
4634 else if (REG_P (target) && TREE_STATIC (exp))
4635 need_to_clear = 1;
4636 else
4637 {
4638 HOST_WIDE_INT count = 0, zero_count = 0;
4639 need_to_clear = ! const_bounds_p;
4641 /* This loop is a more accurate version of the loop in
4642 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4643 is also needed to check for missing elements. */
4644 for (elt = CONSTRUCTOR_ELTS (exp);
4645 elt != NULL_TREE && ! need_to_clear;
4646 elt = TREE_CHAIN (elt))
4648 tree index = TREE_PURPOSE (elt);
4649 HOST_WIDE_INT this_node_count;
4651 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4653 tree lo_index = TREE_OPERAND (index, 0);
4654 tree hi_index = TREE_OPERAND (index, 1);
4656 if (! host_integerp (lo_index, 1)
4657 || ! host_integerp (hi_index, 1))
4658 {
4659 need_to_clear = 1;
4660 break;
4661 }
4663 this_node_count = (tree_low_cst (hi_index, 1)
4664 - tree_low_cst (lo_index, 1) + 1);
4666 else
4667 this_node_count = 1;
4669 count += this_node_count;
4670 if (mostly_zeros_p (TREE_VALUE (elt)))
4671 zero_count += this_node_count;
4674 /* Clear the entire array first if there are any missing
4675 elements, or if the incidence of zero elements is >=
4676 75%. */
4677 if (! need_to_clear
4678 && (count < maxelt - minelt + 1
4679 || 4 * zero_count >= 3 * count))
4680 need_to_clear = 1;
4683 if (need_to_clear && size > 0)
4684 {
4685 if (REG_P (target))
4686 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4687 else
4688 clear_storage (target, GEN_INT (size));
4689 cleared = 1;
4690 }
4692 if (!cleared && REG_P (target))
4693 /* Inform later passes that the old value is dead. */
4694 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4696 /* Store each element of the constructor into the
4697 corresponding element of TARGET, determined by counting the
4699 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4701 elt = TREE_CHAIN (elt), i++)
4703 enum machine_mode mode;
4704 HOST_WIDE_INT bitsize;
4705 HOST_WIDE_INT bitpos;
4707 tree value = TREE_VALUE (elt);
4708 tree index = TREE_PURPOSE (elt);
4709 rtx xtarget = target;
4711 if (cleared && initializer_zerop (value))
4712 continue;
4714 unsignedp = TYPE_UNSIGNED (elttype);
4715 mode = TYPE_MODE (elttype);
4716 if (mode == BLKmode)
4717 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4718 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4719 : -1);
4720 else
4721 bitsize = GET_MODE_BITSIZE (mode);
4723 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4725 tree lo_index = TREE_OPERAND (index, 0);
4726 tree hi_index = TREE_OPERAND (index, 1);
4727 rtx index_r, pos_rtx;
4728 HOST_WIDE_INT lo, hi, count;
4729 tree position;
4731 /* If the range is constant and "small", unroll the loop. */
4732 if (const_bounds_p
4733 && host_integerp (lo_index, 0)
4734 && host_integerp (hi_index, 0)
4735 && (lo = tree_low_cst (lo_index, 0),
4736 hi = tree_low_cst (hi_index, 0),
4737 count = hi - lo + 1,
4738 (!MEM_P (target)
4739 || count <= 2
4740 || (host_integerp (TYPE_SIZE (elttype), 1)
4741 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4742 <= 40 * 8)))))
4744 lo -= minelt; hi -= minelt;
4745 for (; lo <= hi; lo++)
4747 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4749 if (MEM_P (target)
4750 && !MEM_KEEP_ALIAS_SET_P (target)
4751 && TREE_CODE (type) == ARRAY_TYPE
4752 && TYPE_NONALIASED_COMPONENT (type))
4754 target = copy_rtx (target);
4755 MEM_KEEP_ALIAS_SET_P (target) = 1;
4758 store_constructor_field
4759 (target, bitsize, bitpos, mode, value, type, cleared,
4760 get_alias_set (elttype));
4765 rtx loop_start = gen_label_rtx ();
4766 rtx loop_end = gen_label_rtx ();
4767 tree exit_cond;
4769 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4770 unsignedp = TYPE_UNSIGNED (domain);
4772 index = build_decl (VAR_DECL, NULL_TREE, domain);
4774 index_r
4775 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4776 &unsignedp, 0));
4777 SET_DECL_RTL (index, index_r);
4778 store_expr (lo_index, index_r, 0);
4780 /* Build the head of the loop. */
4781 do_pending_stack_adjust ();
4782 emit_label (loop_start);
4784 /* Assign value to element index. */
4785 position
4786 = convert (ssizetype,
4787 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4788 index, TYPE_MIN_VALUE (domain))));
4789 position = size_binop (MULT_EXPR, position,
4790 convert (ssizetype,
4791 TYPE_SIZE_UNIT (elttype)));
4793 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4794 xtarget = offset_address (target, pos_rtx,
4795 highest_pow2_factor (position));
4796 xtarget = adjust_address (xtarget, mode, 0);
4797 if (TREE_CODE (value) == CONSTRUCTOR)
4798 store_constructor (value, xtarget, cleared,
4799 bitsize / BITS_PER_UNIT);
4800 else
4801 store_expr (value, xtarget, 0);
4803 /* Generate a conditional jump to exit the loop. */
4804 exit_cond = build2 (LT_EXPR, integer_type_node,
4806 jumpif (exit_cond, loop_end);
4808 /* Update the loop counter, and jump to the head of the loop. */
4810 expand_assignment (index,
4811 build2 (PLUS_EXPR, TREE_TYPE (index),
4812 index, integer_one_node));
4814 emit_jump (loop_start);
4816 /* Build the end of the loop. */
4817 emit_label (loop_end);
4820 else if ((index != 0 && ! host_integerp (index, 0))
4821 || ! host_integerp (TYPE_SIZE (elttype), 1))
4825 if (index == 0)
4826 index = ssize_int (1);
4828 else
4829 index = fold_convert (ssizetype,
4830 fold (build2 (MINUS_EXPR,
4831 TREE_TYPE (index),
4832 index,
4833 TYPE_MIN_VALUE (domain))));
4835 position = size_binop (MULT_EXPR, index,
4836 convert (ssizetype,
4837 TYPE_SIZE_UNIT (elttype)));
4838 xtarget = offset_address (target,
4839 expand_expr (position, 0, VOIDmode, 0),
4840 highest_pow2_factor (position));
4841 xtarget = adjust_address (xtarget, mode, 0);
4842 store_expr (value, xtarget, 0);
4846 if (index != 0)
4847 bitpos = ((tree_low_cst (index, 0) - minelt)
4848 * tree_low_cst (TYPE_SIZE (elttype), 1));
4849 else
4850 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4852 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4853 && TREE_CODE (type) == ARRAY_TYPE
4854 && TYPE_NONALIASED_COMPONENT (type))
4856 target = copy_rtx (target);
4857 MEM_KEEP_ALIAS_SET_P (target) = 1;
4859 store_constructor_field (target, bitsize, bitpos, mode, value,
4860 type, cleared, get_alias_set (elttype));
4872 tree elttype = TREE_TYPE (type);
4873 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4874 enum machine_mode eltmode = TYPE_MODE (elttype);
4875 HOST_WIDE_INT bitsize;
4876 HOST_WIDE_INT bitpos;
4877 rtx *vector = NULL;
4878 unsigned n_elts;
4880 gcc_assert (eltmode != BLKmode);
4882 n_elts = TYPE_VECTOR_SUBPARTS (type);
4883 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4885 enum machine_mode mode = GET_MODE (target);
4887 icode = (int) vec_init_optab->handlers[mode].insn_code;
4888 if (icode != CODE_FOR_nothing)
4892 vector = alloca (n_elts * sizeof (rtx));
4893 for (i = 0; i < n_elts; i++)
4894 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4898 /* If the constructor has fewer elements than the vector,
4899 clear the whole vector first. Similarly if this is a static
4900 constructor of a non-BLKmode object. */
4901 if (cleared)
4902 need_to_clear = 0;
4903 else if (REG_P (target) && TREE_STATIC (exp))
4904 need_to_clear = 1;
4905 else
4906 {
4907 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4909 for (elt = CONSTRUCTOR_ELTS (exp);
4911 elt = TREE_CHAIN (elt))
4913 int n_elts_here = tree_low_cst
4914 (int_const_binop (TRUNC_DIV_EXPR,
4915 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4916 TYPE_SIZE (elttype), 0), 1);
4918 count += n_elts_here;
4919 if (mostly_zeros_p (TREE_VALUE (elt)))
4920 zero_count += n_elts_here;
4923 /* Clear the entire vector first if there are any missing elements,
4924 or if the incidence of zero elements is >= 75%. */
4925 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4928 if (need_to_clear && size > 0 && !vector)
4929 {
4930 if (REG_P (target))
4931 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4932 else
4933 clear_storage (target, GEN_INT (size));
4934 cleared = 1;
4935 }
4937 if (!cleared && REG_P (target))
4938 /* Inform later passes that the old value is dead. */
4939 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4941 /* Store each element of the constructor into the corresponding
4942 element of TARGET, determined by counting the elements. */
4943 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4945 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4947 tree value = TREE_VALUE (elt);
4948 tree index = TREE_PURPOSE (elt);
4949 HOST_WIDE_INT eltpos;
4951 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4952 if (cleared && initializer_zerop (value))
4953 continue;
4955 if (index != 0)
4956 eltpos = tree_low_cst (index, 1);
4957 else
4958 eltpos = i;
4960 if (vector)
4961 {
4962 /* Vector CONSTRUCTORs should only be built from smaller
4963 vectors in the case of BLKmode vectors. */
4964 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
4965 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4967 else
4968 {
4969 enum machine_mode value_mode =
4970 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4971 ? TYPE_MODE (TREE_TYPE (value))
4972 : eltmode;
4973 bitpos = eltpos * elt_size;
4974 store_constructor_field (target, bitsize, bitpos,
4975 value_mode, value, type,
4976 cleared, get_alias_set (elttype));
4980 if (vector)
4981 emit_insn (GEN_FCN (icode)
4982 (target,
4983 gen_rtx_PARALLEL (GET_MODE (target),
4984 gen_rtvec_v (n_elts, vector))));
4988 /* Set constructor assignments. */
4991 tree elt = CONSTRUCTOR_ELTS (exp);
4992 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4993 tree domain = TYPE_DOMAIN (type);
4994 tree domain_min, domain_max, bitlength;
4996 /* The default implementation strategy is to extract the
4997 constant parts of the constructor, use that to initialize
4998 the target, and then "or" in whatever non-constant ranges
4999 we need in addition.
5001 If a large set is all zero or all ones, it is probably
5002 better to set it using memset. Also, if a large set has
5003 just a single range, it may also be better to first clear the
5004 set (using memset), and then set the bits we want. */
5007 /* Check for all zeros. */
5008 if (elt == NULL_TREE && size > 0)
5009 {
5010 if (!cleared)
5011 clear_storage (target, GEN_INT (size));
5012 return;
5013 }
5015 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5016 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5017 bitlength = size_binop (PLUS_EXPR,
5018 size_diffop (domain_max, domain_min),
5019 ssize_int (1));
5021 nbits = tree_low_cst (bitlength, 1);
5023 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
5024 that are "complicated" (more than one range), initialize
5025 (the constant parts) by copying from a constant. */
5026 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5027 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5029 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5030 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5031 char *bit_buffer = alloca (nbits);
5032 HOST_WIDE_INT word = 0;
5033 unsigned int bit_pos = 0;
5034 unsigned int ibit = 0;
5035 unsigned int offset = 0; /* In bytes from beginning of set. */
5037 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5038 for (;;)
5039 {
5040 if (bit_buffer[ibit])
5042 if (BYTES_BIG_ENDIAN)
5043 word |= (1 << (set_word_size - 1 - bit_pos));
5044 else
5045 word |= 1 << bit_pos;
5047 bit_pos++; ibit++;
5049 if (bit_pos >= set_word_size || ibit == nbits)
5051 if (word != 0 || ! cleared)
5053 rtx datum = gen_int_mode (word, mode);
5054 rtx to_rtx;
5056 /* The assumption here is that it is safe to
5057 use XEXP if the set is multi-word, but not
5058 if it's single-word. */
5059 if (MEM_P (target))
5060 to_rtx = adjust_address (target, mode, offset);
5061 else
5062 {
5063 gcc_assert (!offset);
5064 to_rtx = target;
5065 }
5066 emit_move_insn (to_rtx, datum);
5073 offset += set_word_size / BITS_PER_UNIT;
5078 /* Don't bother clearing storage if the set is all ones. */
5079 if (TREE_CHAIN (elt) != NULL_TREE
5080 || (TREE_PURPOSE (elt) == NULL_TREE
5081 ? nbits != 1
5082 : ( ! host_integerp (TREE_VALUE (elt), 0)
5083 || ! host_integerp (TREE_PURPOSE (elt), 0)
5084 || (tree_low_cst (TREE_VALUE (elt), 0)
5085 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5086 != (HOST_WIDE_INT) nbits))))
5087 clear_storage (target, expr_size (exp));
5089 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5091 /* Start of range of element or NULL. */
5092 tree startbit = TREE_PURPOSE (elt);
5093 /* End of range of element, or element value. */
5094 tree endbit = TREE_VALUE (elt);
5095 HOST_WIDE_INT startb, endb;
5096 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5098 bitlength_rtx = expand_expr (bitlength,
5099 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5101 /* Handle non-range tuple element like [ expr ]. */
5102 if (startbit == NULL_TREE)
5104 startbit = save_expr (endbit);
5105 endbit = startbit;
5108 startbit = convert (sizetype, startbit);
5109 endbit = convert (sizetype, endbit);
5110 if (! integer_zerop (domain_min))
5112 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5113 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5115 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5116 EXPAND_CONST_ADDRESS);
5117 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5118 EXPAND_CONST_ADDRESS);
5124 ((build_qualified_type (lang_hooks.types.type_for_mode
5125 (GET_MODE (target), 0),
5128 emit_move_insn (targetx, target);
5133 gcc_assert (MEM_P (target));
5137 /* Optimization: If startbit and endbit are constants divisible
5138 by BITS_PER_UNIT, call memset instead. */
5139 if (TREE_CODE (startbit) == INTEGER_CST
5140 && TREE_CODE (endbit) == INTEGER_CST
5141 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5142 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5144 emit_library_call (memset_libfunc, LCT_NORMAL,
5145 VOIDmode, 3,
5146 plus_constant (XEXP (targetx, 0),
5147 startb / BITS_PER_UNIT),
5148 Pmode,
5149 constm1_rtx, TYPE_MODE (integer_type_node),
5150 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5151 TYPE_MODE (sizetype));
5153 else
5154 emit_library_call (setbits_libfunc, LCT_NORMAL,
5155 VOIDmode, 4, XEXP (targetx, 0),
5156 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5157 startbit_rtx, TYPE_MODE (sizetype),
5158 endbit_rtx, TYPE_MODE (sizetype));
5160 if (REG_P (target))
5161 emit_move_insn (target, targetx);
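/* An illustrative sketch, not part of GCC: the byte-aligned special
case above degenerates to a memset. With startb a multiple of the
byte size and endb the exclusive end bit, setting the range
[startb, endb) just fills (endb - startb) / CHAR_BIT bytes with all
ones. Assumes memset from <string.h> and CHAR_BIT from <limits.h>;
the helper name is hypothetical. */
static void
set_bits_bytewise_sketch (unsigned char *set, int startb, int endb)
{
  /* Mirrors the memset_libfunc call emitted above. */
  memset (set + startb / CHAR_BIT, -1, (endb - startb) / CHAR_BIT);
}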
5170 /* Store the value of EXP (an expression tree)
5171 into a subfield of TARGET which has mode MODE and occupies
5172 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5173 If MODE is VOIDmode, it means that we are storing into a bit-field.
5175 Always return const0_rtx unless we have something particular to
5176 return.
5178 TYPE is the type of the underlying object,
5180 ALIAS_SET is the alias set for the destination. This value will
5181 (in general) be different from that for TARGET, since TARGET is a
5182 reference to the containing structure. */
5185 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5186 enum machine_mode mode, tree exp, tree type, int alias_set)
5188 HOST_WIDE_INT width_mask = 0;
5190 if (TREE_CODE (exp) == ERROR_MARK)
5191 return const0_rtx;
5193 /* If we have nothing to store, do nothing unless the expression has
5194 side effects. */
5195 if (bitsize == 0)
5196 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5197 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5198 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5200 /* If we are storing into an unaligned field of an aligned union that is
5201 in a register, we may have the mode of TARGET being an integer mode but
5202 MODE == BLKmode. In that case, get an aligned object whose size and
5203 alignment are the same as TARGET and store TARGET into it (we can avoid
5204 the store if the field being stored is the entire width of TARGET). Then
5205 call ourselves recursively to store the field into a BLKmode version of
5206 that object. Finally, load from the object into TARGET. This is not
5207 very efficient in general, but should only be slightly more expensive
5208 than the otherwise-required unaligned accesses. Perhaps this can be
5209 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5210 twice, once with emit_move_insn and once via store_field. */
5212 if (mode == BLKmode
5213 && (REG_P (target) || GET_CODE (target) == SUBREG))
5215 rtx object = assign_temp (type, 0, 1, 1);
5216 rtx blk_object = adjust_address (object, BLKmode, 0);
5218 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5219 emit_move_insn (object, target);
5221 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5223 emit_move_insn (target, object);
5225 /* We want to return the BLKmode version of the data. */
5226 return blk_object;
5227 }
5229 if (GET_CODE (target) == CONCAT)
5231 /* We're storing into a struct containing a single __complex. */
5233 gcc_assert (!bitpos);
5234 return store_expr (exp, target, 0);
5237 /* If the structure is in a register or if the component
5238 is a bit field, we cannot use addressing to access it.
5239 Use bit-field techniques or SUBREG to store in it. */
5241 if (mode == VOIDmode
5242 || (mode != BLKmode && ! direct_store[(int) mode]
5243 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5244 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5245 || REG_P (target)
5246 || GET_CODE (target) == SUBREG
5247 /* If the field isn't aligned enough to store as an ordinary memref,
5248 store it as a bit field. */
5249 || (mode != BLKmode
5250 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5251 || bitpos % GET_MODE_ALIGNMENT (mode))
5252 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5253 || (bitpos % BITS_PER_UNIT != 0)))
5254 /* If the RHS and field are a constant size and the size of the
5255 RHS isn't the same size as the bitfield, we must use bitfield
5256 operations. */
5257 || (bitsize >= 0
5258 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5259 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5261 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5263 /* If BITSIZE is narrower than the size of the type of EXP
5264 we will be narrowing TEMP. Normally, what's wanted are the
5265 low-order bits. However, if EXP's type is a record and this is a
5266 big-endian machine, we want the upper BITSIZE bits. */
5267 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5268 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5269 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5270 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5271 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5275 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5277 if (mode != VOIDmode && mode != BLKmode
5278 && mode != TYPE_MODE (TREE_TYPE (exp)))
5279 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5281 /* If the modes of TARGET and TEMP are both BLKmode, both
5282 must be in memory and BITPOS must be aligned on a byte
5283 boundary. If so, we simply do a block copy. */
5284 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5286 gcc_assert (MEM_P (target) && MEM_P (temp)
5287 && !(bitpos % BITS_PER_UNIT));
5289 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5290 emit_block_move (target, temp,
5291 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5292 / BITS_PER_UNIT),
5293 BLOCK_OP_NORMAL);
5295 return const0_rtx;
5296 }
5298 /* Store the value in the bitfield. */
5299 store_bit_field (target, bitsize, bitpos, mode, temp);
5301 return const0_rtx;
5302 }
5303 else
5304 {
5305 /* Now build a reference to just the desired component. */
5306 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5308 if (to_rtx == target)
5309 to_rtx = copy_rtx (to_rtx);
5311 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5312 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5313 set_mem_alias_set (to_rtx, alias_set);
5315 return store_expr (exp, to_rtx, 0);
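/* An illustrative sketch, not part of GCC: a store that must take the
store_bit_field path above, since the destination is neither byte
aligned nor directly addressable. The type and function names are
hypothetical. */
struct bitfield_sketch { unsigned a : 3; unsigned b : 5; };
static void
store_field_sketch (struct bitfield_sketch *p, unsigned v)
{
  /* bitsize == 5 and bitpos == 3 here, so MODE is VOIDmode and the
     assignment is expanded via store_bit_field rather than through
     an ordinary memory reference. */
  p->b = v;
}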
5319 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5320 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5321 codes and find the ultimate containing object, which we return.
5323 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5324 bit position, and *PUNSIGNEDP to the signedness of the field.
5325 If the position of the field is variable, we store a tree
5326 giving the variable offset (in units) in *POFFSET.
5327 This offset is in addition to the bit position.
5328 If the position is not variable, we store 0 in *POFFSET.
5330 If any of the extraction expressions is volatile,
5331 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5333 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5334 is a mode that can be used to access the field. In that case, *PBITSIZE
5335 is redundant.
5337 If the field describes a variable-sized object, *PMODE is set to
5338 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5339 this case, but the address of the object can be found. */
5342 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5343 HOST_WIDE_INT *pbitpos, tree *poffset,
5344 enum machine_mode *pmode, int *punsignedp,
5345 int *pvolatilep)
5346 {
5347 tree size_tree = 0;
5348 enum machine_mode mode = VOIDmode;
5349 tree offset = size_zero_node;
5350 tree bit_offset = bitsize_zero_node;
5351 tree tem;
5353 /* First get the mode, signedness, and size. We do this from just the
5354 outermost expression. */
5355 if (TREE_CODE (exp) == COMPONENT_REF)
5357 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5358 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5359 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5361 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5363 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5365 size_tree = TREE_OPERAND (exp, 1);
5366 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5368 else
5369 {
5370 mode = TYPE_MODE (TREE_TYPE (exp));
5371 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5373 if (mode == BLKmode)
5374 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5375 else
5376 *pbitsize = GET_MODE_BITSIZE (mode);
5381 if (! host_integerp (size_tree, 1))
5382 mode = BLKmode, *pbitsize = -1;
5383 else
5384 *pbitsize = tree_low_cst (size_tree, 1);
5387 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5388 and find the ultimate containing object. */
5391 switch (TREE_CODE (exp))
5394 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5395 TREE_OPERAND (exp, 2));
5400 tree field = TREE_OPERAND (exp, 1);
5401 tree this_offset = component_ref_field_offset (exp);
5403 /* If this field hasn't been filled in yet, don't go past it.
5404 This should only happen when folding expressions made during
5405 type construction. */
5406 if (this_offset == 0)
5407 break;
5409 offset = size_binop (PLUS_EXPR, offset, this_offset);
5410 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5411 DECL_FIELD_BIT_OFFSET (field));
5413 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5418 case ARRAY_RANGE_REF:
5420 tree index = TREE_OPERAND (exp, 1);
5421 tree low_bound = array_ref_low_bound (exp);
5422 tree unit_size = array_ref_element_size (exp);
5424 /* We assume all arrays have sizes that are a multiple of a byte.
5425 First subtract the lower bound, if any, in the type of the
5426 index, then convert to sizetype and multiply by the size of
5427 the array element. */
5428 if (! integer_zerop (low_bound))
5429 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5430 index, low_bound));
5432 offset = size_binop (PLUS_EXPR, offset,
5433 size_binop (MULT_EXPR,
5434 convert (sizetype, index),
5435 unit_size));
5443 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5444 bitsize_int (*pbitsize));
5447 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5448 conversions that don't change the mode, and all view conversions
5449 except those that need to "step up" the alignment. */
5451 case NON_LVALUE_EXPR:
5456 if (TYPE_MODE (TREE_TYPE (exp))
5457 != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5461 case VIEW_CONVERT_EXPR:
5462 if ((TYPE_ALIGN (TREE_TYPE (exp))
5463 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5465 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5466 < BIGGEST_ALIGNMENT)
5467 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5468 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5476 /* If any reference in the chain is volatile, the effect is volatile. */
5477 if (TREE_THIS_VOLATILE (exp))
5478 *pvolatilep = 1;
5480 exp = TREE_OPERAND (exp, 0);
5484 /* If OFFSET is constant, see if we can return the whole thing as a
5485 constant bit position. Otherwise, split it up. */
5486 if (host_integerp (offset, 0)
5487 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5488 bitsize_unit_node))
5489 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5490 && host_integerp (tem, 0))
5491 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5492 else
5493 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
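/* An illustrative sketch, not part of GCC: what get_inner_reference
computes for a simple COMPONENT_REF. For x.b with x of the
hypothetical type below, on a typical target with 32-bit int, the
containing object is x, *PBITSIZE is 16, *PBITPOS is 32, *POFFSET is
0 and *PMODE is the mode of a short; only an ARRAY_REF with a
variable index would make *POFFSET nonzero. */
struct inner_ref_sketch { int a; short b; };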
5499 /* Return a tree of sizetype representing the size, in bytes, of the element
5500 of EXP, an ARRAY_REF. */
5503 array_ref_element_size (tree exp)
5505 tree aligned_size = TREE_OPERAND (exp, 3);
5506 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5508 /* If a size was specified in the ARRAY_REF, it's the size measured
5509 in alignment units of the element type. So multiply by that value. */
5510 if (aligned_size)
5511 {
5512 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5513 sizetype from another type of the same width and signedness. */
5514 if (TREE_TYPE (aligned_size) != sizetype)
5515 aligned_size = fold_convert (sizetype, aligned_size);
5516 return size_binop (MULT_EXPR, aligned_size,
5517 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5520 /* Otherwise, take the size from that of the element type. Substitute
5521 any PLACEHOLDER_EXPR that we have. */
5523 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5526 /* Return a tree representing the lower bound of the array mentioned in
5527 EXP, an ARRAY_REF. */
5530 array_ref_low_bound (tree exp)
5532 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5534 /* If a lower bound is specified in EXP, use it. */
5535 if (TREE_OPERAND (exp, 2))
5536 return TREE_OPERAND (exp, 2);
5538 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5539 substituting for a PLACEHOLDER_EXPR as needed. */
5540 if (domain_type && TYPE_MIN_VALUE (domain_type))
5541 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5543 /* Otherwise, return a zero of the appropriate type. */
5544 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5547 /* Return a tree representing the upper bound of the array mentioned in
5548 EXP, an ARRAY_REF. */
5551 array_ref_up_bound (tree exp)
5553 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5555 /* If there is a domain type and it has an upper bound, use it, substituting
5556 for a PLACEHOLDER_EXPR as needed. */
5557 if (domain_type && TYPE_MAX_VALUE (domain_type))
5558 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5560 /* Otherwise fail. */
5561 return NULL_TREE;
5564 /* Return a tree representing the offset, in bytes, of the field referenced
5565 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5568 component_ref_field_offset (tree exp)
5570 tree aligned_offset = TREE_OPERAND (exp, 2);
5571 tree field = TREE_OPERAND (exp, 1);
5573 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5574 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5575 value. */
5576 if (aligned_offset)
5577 {
5578 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5579 sizetype from another type of the same width and signedness. */
5580 if (TREE_TYPE (aligned_offset) != sizetype)
5581 aligned_offset = fold_convert (sizetype, aligned_offset);
5582 return size_binop (MULT_EXPR, aligned_offset,
5583 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5586 /* Otherwise, take the offset from that of the field. Substitute
5587 any PLACEHOLDER_EXPR that we have. */
5589 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5592 /* Return 1 if T is an expression that get_inner_reference handles. */
5595 handled_component_p (tree t)
5597 switch (TREE_CODE (t))
5602 case ARRAY_RANGE_REF:
5603 case NON_LVALUE_EXPR:
5604 case VIEW_CONVERT_EXPR:
5609 /* ??? Sure they are handled, but get_inner_reference may return
5610 a different PBITSIZE, depending upon whether the expression is
5611 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5612 case NOP_EXPR:
5613 case CONVERT_EXPR:
5614 return (TYPE_MODE (TREE_TYPE (t))
5615 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5617 default:
5618 return 0;
5619 }
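/* An illustrative sketch, not part of GCC: how a caller typically uses
force_operand (below) to legitimize an arbitrary arithmetic rtx.
gen_rtx_PLUS and force_operand are the real internal APIs; the helper
name is hypothetical. */
static rtx
force_plus_sketch (rtx base, rtx index)
{
  /* (plus base index) may not be a valid operand on its own;
     force_operand emits the addition and hands back a REG, SUBREG,
     MEM or constant. */
  return force_operand (gen_rtx_PLUS (Pmode, base, index), NULL_RTX);
}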
5622 /* Given an rtx VALUE that may contain additions and multiplications, return
5623 an equivalent value that just refers to a register, memory, or constant.
5624 This is done by generating instructions to perform the arithmetic and
5625 returning a pseudo-register containing the value.
5627 The returned value may be a REG, SUBREG, MEM or constant. */
5630 force_operand (rtx value, rtx target)
5631 {
5632 rtx op1, op2;
5633 /* Use subtarget as the target for operand 0 of a binary operation. */
5634 rtx subtarget = get_subtarget (target);
5635 enum rtx_code code = GET_CODE (value);
5637 /* Check for a subreg applied to an expression produced by the loop optimizer. */
5638 if (code == SUBREG
5639 && !REG_P (SUBREG_REG (value))
5640 && !MEM_P (SUBREG_REG (value)))
5642 value = simplify_gen_subreg (GET_MODE (value),
5643 force_reg (GET_MODE (SUBREG_REG (value)),
5644 force_operand (SUBREG_REG (value),
5645 NULL_RTX)),
5646 GET_MODE (SUBREG_REG (value)),
5647 SUBREG_BYTE (value));
5648 code = GET_CODE (value);
5651 /* Check for a PIC address load. */
5652 if ((code == PLUS || code == MINUS)
5653 && XEXP (value, 0) == pic_offset_table_rtx
5654 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5655 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5656 || GET_CODE (XEXP (value, 1)) == CONST))
5658 if (!subtarget)
5659 subtarget = gen_reg_rtx (GET_MODE (value));
5660 emit_move_insn (subtarget, value);
5661 return subtarget;
5664 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5666 if (!target)
5667 target = gen_reg_rtx (GET_MODE (value));
5668 convert_move (target, force_operand (XEXP (value, 0), NULL),
5669 code == ZERO_EXTEND);
5670 return target;
5673 if (ARITHMETIC_P (value))
5675 op2 = XEXP (value, 1);
5676 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5677 subtarget = 0;
5678 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5679 {
5680 code = PLUS;
5681 op2 = negate_rtx (GET_MODE (value), op2);
5682 }
5684 /* Check for an addition with OP2 a constant integer and our first
5685 operand a PLUS of a virtual register and something else. In that
5686 case, we want to emit the sum of the virtual register and the
5687 constant first and then add the other value. This allows virtual
5688 register instantiation to simply modify the constant rather than
5689 creating another one around this addition. */
5690 if (code == PLUS && GET_CODE (op2) == CONST_INT
5691 && GET_CODE (XEXP (value, 0)) == PLUS
5692 && REG_P (XEXP (XEXP (value, 0), 0))
5693 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5694 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5696 rtx temp = expand_simple_binop (GET_MODE (value), code,
5697 XEXP (XEXP (value, 0), 0), op2,
5698 subtarget, 0, OPTAB_LIB_WIDEN);
5699 return expand_simple_binop (GET_MODE (value), code, temp,
5700 force_operand (XEXP (XEXP (value,
5702 target, 0, OPTAB_LIB_WIDEN);
5705 op1 = force_operand (XEXP (value, 0), subtarget);
5706 op2 = force_operand (op2, NULL_RTX);
5707 switch (code)
5708 {
5709 case MULT:
5710 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5711 case DIV:
5712 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5713 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5714 target, 1, OPTAB_LIB_WIDEN);
5715 else
5716 return expand_divmod (0,
5717 FLOAT_MODE_P (GET_MODE (value))
5718 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5719 GET_MODE (value), op1, op2, target, 0);
5721 case MOD:
5722 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5723 target, 0);
5725 case UDIV:
5726 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5727 target, 1);
5729 case UMOD:
5730 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5731 target, 1);
5733 case ASHIFTRT:
5734 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5735 target, 0, OPTAB_LIB_WIDEN);
5737 default:
5738 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5739 target, 1, OPTAB_LIB_WIDEN);
5740 }
5742 if (UNARY_P (value))
5744 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5745 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5748 #ifdef INSN_SCHEDULING
5749 /* On machines that have insn scheduling, we want all memory references to be
5750 explicit, so we need to deal with such paradoxical SUBREGs. */
5751 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5752 && (GET_MODE_SIZE (GET_MODE (value))
5753 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5754 value
5755 = simplify_gen_subreg (GET_MODE (value),
5756 force_reg (GET_MODE (SUBREG_REG (value)),
5757 force_operand (SUBREG_REG (value),
5758 NULL_RTX)),
5759 GET_MODE (SUBREG_REG (value)),
5760 SUBREG_BYTE (value));
5766 /* Subroutine of expand_expr: return nonzero iff there is no way that
5767 EXP can reference X, which is being modified. TOP_P is nonzero if this
5768 call is going to be used to determine whether we need a temporary
5769 for EXP, as opposed to a recursive call to this function.
5771 It is always safe for this routine to return zero since it merely
5772 searches for optimization opportunities. */
5775 safe_from_p (rtx x, tree exp, int top_p)
5776 {
5777 rtx exp_rtl = 0;
5778 int i, nops;
5780 if (x == 0
5781 /* If EXP has varying size, we MUST use a target since we currently
5782 have no way of allocating temporaries of variable size
5783 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5784 So we assume here that something at a higher level has prevented a
5785 clash. This is somewhat bogus, but the best we can do. Only
5786 do this when X is BLKmode and when we are at the top level. */
5787 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5788 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5789 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5790 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5791 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5793 && GET_MODE (x) == BLKmode)
5794 /* If X is in the outgoing argument area, it is always safe. */
5795 || (MEM_P (x)
5796 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5797 || (GET_CODE (XEXP (x, 0)) == PLUS
5798 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5799 return 1;
5801 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5802 find the underlying pseudo. */
5803 if (GET_CODE (x) == SUBREG)
5804 {
5805 x = SUBREG_REG (x);
5806 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5807 return 0;
5808 }
5810 /* Now look at our tree code and possibly recurse. */
5811 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5813 case tcc_declaration:
5814 exp_rtl = DECL_RTL_IF_SET (exp);
5815 break;
5820 case tcc_exceptional:
5821 if (TREE_CODE (exp) == TREE_LIST)
5825 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5826 return 0;
5827 exp = TREE_CHAIN (exp);
5828 if (!exp)
5829 return 1;
5830 if (TREE_CODE (exp) != TREE_LIST)
5831 return safe_from_p (x, exp, 0);
5834 else if (TREE_CODE (exp) == ERROR_MARK)
5835 return 1; /* An already-visited SAVE_EXPR? */
5836 else
5837 return 0;
5840 /* The only case we look at here is the DECL_INITIAL inside a
5841 DECL_EXPR. */
5842 return (TREE_CODE (exp) != DECL_EXPR
5843 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5844 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5845 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5848 case tcc_comparison:
5849 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5850 return 0;
5852 /* Fall through. */
5853 case tcc_unary:
5854 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5856 case tcc_expression:
5858 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5859 the expression. If it is set, we conflict iff we are that rtx or
5860 both are in memory. Otherwise, we check all operands of the
5861 expression recursively. */
5863 switch (TREE_CODE (exp))
5864 {
5865 case ADDR_EXPR:
5866 /* If the operand is static or we are static, we can't conflict.
5867 Likewise if we don't conflict with the operand at all. */
5868 if (staticp (TREE_OPERAND (exp, 0))
5869 || TREE_STATIC (exp)
5870 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5871 return 1;
5873 /* Otherwise, the only way this can conflict is if we are taking
5874 the address of a DECL whose address is part of X, which is
5875 very rare. */
5876 exp = TREE_OPERAND (exp, 0);
5879 if (!DECL_RTL_SET_P (exp)
5880 || !MEM_P (DECL_RTL (exp)))
5881 return 0;
5883 exp_rtl = XEXP (DECL_RTL (exp), 0);
5887 case MISALIGNED_INDIRECT_REF:
5888 case ALIGN_INDIRECT_REF:
5889 case INDIRECT_REF:
5890 if (MEM_P (x)
5891 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5892 get_alias_set (exp)))
5893 return 0;
5894 break;
5896 case CALL_EXPR:
5897 /* Assume that the call will clobber all hard registers and
5898 all of memory. */
5899 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5900 || MEM_P (x))
5901 return 0;
5902 break;
5904 case WITH_CLEANUP_EXPR:
5905 case CLEANUP_POINT_EXPR:
5906 /* Lowered by gimplify.c. */
5907 gcc_unreachable ();
5909 case SAVE_EXPR:
5910 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5916 /* If we have an rtx, we do not need to scan our operands. */
5917 if (exp_rtl)
5918 break;
5920 nops = first_rtl_op (TREE_CODE (exp));
5921 for (i = 0; i < nops; i++)
5922 if (TREE_OPERAND (exp, i) != 0
5923 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5924 return 0;
5926 /* If this is a language-specific tree code, it may require
5927 special handling. */
5928 if ((unsigned int) TREE_CODE (exp)
5929 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5930 && !lang_hooks.safe_from_p (x, exp))
5931 return 0;
5934 case tcc_type:
5935 /* Should never get a type here. */
5936 gcc_unreachable ();
5939 /* If we have an rtl, find any enclosed object. Then see if we conflict
5940 with it. */
5941 if (exp_rtl)
5942 {
5943 if (GET_CODE (exp_rtl) == SUBREG)
5944 {
5945 exp_rtl = SUBREG_REG (exp_rtl);
5946 if (REG_P (exp_rtl)
5947 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5948 return 0;
5949 }
5951 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5952 are memory and they conflict. */
5953 return ! (rtx_equal_p (x, exp_rtl)
5954 || (MEM_P (x) && MEM_P (exp_rtl)
5955 && true_dependence (exp_rtl, VOIDmode, x,
5956 rtx_addr_varies_p)));
5959 /* If we reach here, it is safe. */
5960 return 1;
5961 }
5964 /* Return the highest power of two that EXP is known to be a multiple of.
5965 This is used in updating alignment of MEMs in array references. */
5967 static unsigned HOST_WIDE_INT
5968 highest_pow2_factor (tree exp)
5970 unsigned HOST_WIDE_INT c0, c1;
5972 switch (TREE_CODE (exp))
5973 {
5974 case INTEGER_CST:
5975 /* We can find the lowest bit that's a one. If the low
5976 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5977 We need to handle this case since we can find it in a COND_EXPR,
5978 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5979 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5980 later ICE. */
5981 if (TREE_CONSTANT_OVERFLOW (exp))
5982 return BIGGEST_ALIGNMENT;
5985 /* Note: tree_low_cst is intentionally not used here,
5986 we don't care about the upper bits. */
5987 c0 = TREE_INT_CST_LOW (exp);
5988 c0 &= -c0;
5989 return c0 ? c0 : BIGGEST_ALIGNMENT;
5993 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5994 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5995 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5996 return MIN (c0, c1);
5998 case MULT_EXPR:
5999 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6000 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6001 return c0 * c1;
6003 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6005 if (integer_pow2p (TREE_OPERAND (exp, 1))
6006 && host_integerp (TREE_OPERAND (exp, 1), 1))
6008 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6009 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6010 return MAX (1, c0 / c1);
6014 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6016 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6018 case COMPOUND_EXPR:
6019 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6021 case COND_EXPR:
6022 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6023 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6024 return MIN (c0, c1);
6026 default:
6027 break;
6028 }
6030 return 1;
6031 }
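/* An illustrative sketch, not part of GCC: the INTEGER_CST case above
isolates the lowest set bit with c0 &= -c0, e.g. 12 -> 4, 8 -> 8,
24 -> 8. For a tree like (i * 12) + 4, the MULT_EXPR case multiplies
factors (1 for the variable i, 4 for the constant 12) and the
PLUS_EXPR case takes the minimum, giving MIN (4, 4) == 4. The helper
name is hypothetical. */
static unsigned HOST_WIDE_INT
lowest_set_bit_sketch (unsigned HOST_WIDE_INT c)
{
  /* Two's-complement trick: c & -c keeps only the lowest set bit. */
  return c & -c;
}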
6033 /* Similar, except that the alignment requirements of TARGET are
6034 taken into account. Assume it is at least as aligned as its
6035 type, unless it is a COMPONENT_REF in which case the layout of
6036 the structure gives the alignment. */
6038 static unsigned HOST_WIDE_INT
6039 highest_pow2_factor_for_target (tree target, tree exp)
6041 unsigned HOST_WIDE_INT target_align, factor;
6043 factor = highest_pow2_factor (exp);
6044 if (TREE_CODE (target) == COMPONENT_REF)
6045 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6047 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6048 return MAX (factor, target_align);
6051 /* Expands variable VAR. */
6054 expand_var (tree var)
6056 if (DECL_EXTERNAL (var))
6057 return;
6059 if (TREE_STATIC (var))
6060 /* If this is an inlined copy of a static local variable,
6061 look up the original decl. */
6062 var = DECL_ORIGIN (var);
6064 if (TREE_STATIC (var)
6065 ? !TREE_ASM_WRITTEN (var)
6066 : !DECL_RTL_SET_P (var))
6068 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
6069 /* Should be ignored. */;
6070 else if (lang_hooks.expand_decl (var))
6071 /* OK. */;
6072 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6073 expand_decl (var);
6074 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6075 rest_of_decl_compilation (var, 0, 0);
6076 else
6077 /* No expansion needed. */
6078 gcc_assert (TREE_CODE (var) == TYPE_DECL
6079 || TREE_CODE (var) == CONST_DECL
6080 || TREE_CODE (var) == FUNCTION_DECL
6081 || TREE_CODE (var) == LABEL_DECL);
6085 /* Subroutine of expand_expr. Expand the two operands of a binary
6086 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6087 The value may be stored in TARGET if TARGET is nonzero. The
6088 MODIFIER argument is as documented by expand_expr. */
6091 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6092 enum expand_modifier modifier)
6094 if (! safe_from_p (target, exp1, 1))
6095 target = 0;
6096 if (operand_equal_p (exp0, exp1, 0))
6098 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6099 *op1 = copy_rtx (*op0);
6100 }
6101 else
6102 {
6103 /* If we need to preserve evaluation order, copy exp0 into its own
6104 temporary variable so that it can't be clobbered by exp1. */
6105 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6106 exp0 = save_expr (exp0);
6107 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6108 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
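/* An illustrative sketch, not part of GCC: why exp0 may be wrapped in
a SAVE_EXPR above. For a language that guarantees left-to-right
evaluation (Java, for instance, sets flag_evaluation_order), the
first operand below must observe the variable before the second
operand increments it, so exp0 is forced into a temporary that exp1's
side effects cannot clobber. All names here are hypothetical. */
static int sketch_i;
static int sketch_load (int x) { return x; }
static int sketch_bump (int *p) { return ++*p; }
static int
evaluation_order_sketch (void)
{
  return sketch_load (sketch_i) + sketch_bump (&sketch_i);
}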
6113 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6114 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6117 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6118 enum expand_modifier modifier)
6120 rtx result, subtarget;
6121 tree inner, offset;
6122 HOST_WIDE_INT bitsize, bitpos;
6123 int volatilep, unsignedp;
6124 enum machine_mode mode1;
6126 /* If we are taking the address of a constant and are at the top level,
6127 we have to use output_constant_def since we can't call force_const_mem
6128 yet. */
6129 /* ??? This should be considered a front-end bug. We should not be
6130 generating ADDR_EXPR of something that isn't an LVALUE. The only
6131 exception here is STRING_CST. */
6132 if (TREE_CODE (exp) == CONSTRUCTOR
6133 || CONSTANT_CLASS_P (exp))
6134 return XEXP (output_constant_def (exp, 0), 0);
6136 /* Everything must be something allowed by is_gimple_addressable. */
6137 switch (TREE_CODE (exp))
6138 {
6139 case INDIRECT_REF:
6140 /* This case will happen via recursion for &a->b. */
6141 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6143 case CONST_DECL:
6144 /* Recurse and make the output_constant_def clause above handle this. */
6145 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6146 tmode, modifier);
6148 case REALPART_EXPR:
6149 /* The real part of the complex number is always first, therefore
6150 the address is the same as the address of the parent object. */
6153 inner = TREE_OPERAND (exp, 0);
6154 break;
6156 case IMAGPART_EXPR:
6157 /* The imaginary part of the complex number is always second.
6158 The expression is therefore always offset by the size of the
6159 scalar type. */
6161 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6162 inner = TREE_OPERAND (exp, 0);
6163 break;
6165 default:
6166 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6167 expand_expr, as that can have various side effects; LABEL_DECLs for
6168 example, may not have their DECL_RTL set yet. Assume language
6169 specific tree nodes can be expanded in some interesting way. */
6170 if (DECL_P (exp)
6171 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6172 {
6173 result = expand_expr (exp, target, tmode,
6174 modifier == EXPAND_INITIALIZER
6175 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6177 /* If the DECL isn't in memory, then the DECL wasn't properly
6178 marked TREE_ADDRESSABLE, which will be either a front-end
6179 or a tree optimizer bug. */
6180 gcc_assert (GET_CODE (result) == MEM);
6181 result = XEXP (result, 0);
6183 /* ??? Is this needed anymore? */
6184 if (DECL_P (exp) && TREE_USED (exp) == 0)
6186 assemble_external (exp);
6187 TREE_USED (exp) = 1;
6190 if (modifier != EXPAND_INITIALIZER
6191 && modifier != EXPAND_CONST_ADDRESS)
6192 result = force_operand (result, target);
6193 return result;
6194 }
6196 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6197 &mode1, &unsignedp, &volatilep);
6198 break;
6199 }
6201 /* We must have made progress. */
6202 gcc_assert (inner != exp);
6204 subtarget = offset || bitpos ? NULL_RTX : target;
6205 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6207 if (offset)
6208 {
6209 rtx tmp;
6211 if (modifier != EXPAND_NORMAL)
6212 result = force_operand (result, NULL);
6213 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6215 result = convert_memory_address (tmode, result);
6216 tmp = convert_memory_address (tmode, tmp);
6218 if (modifier == EXPAND_SUM)
6219 result = gen_rtx_PLUS (tmode, result, tmp);
6220 else
6221 {
6222 subtarget = bitpos ? NULL_RTX : target;
6223 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6224 1, OPTAB_LIB_WIDEN);
6228 if (bitpos)
6229 {
6230 /* Someone beforehand should have rejected taking the address
6231 of such an object. */
6232 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6234 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6235 if (modifier < EXPAND_SUM)
6236 result = force_operand (result, target);
6237 }
6239 return result;
6240 }
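/* An illustrative sketch, not part of GCC: the constant-bitpos tail
above. For &p->f the recursion bottoms out at *p, and the field's
bit offset is folded in with plus_constant, so the address is just
the base plus a byte displacement. The type and function names are
hypothetical. */
struct addr_sketch { int a; int f; };
static int *
addr_expr_sketch (struct addr_sketch *p)
{
  /* bitpos == 32 and offset == 0 on a typical 32-bit-int target,
     giving plus_constant (p, 4). */
  return &p->f;
}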
6242 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6243 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6246 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6247 enum expand_modifier modifier)
6249 enum machine_mode rmode;
6250 rtx result;
6252 /* Target mode of VOIDmode says "whatever's natural". */
6253 if (tmode == VOIDmode)
6254 tmode = TYPE_MODE (TREE_TYPE (exp));
6256 /* We can get called with some Weird Things if the user does silliness
6257 like "(short) &a". In that case, convert_memory_address won't do
6258 the right thing, so ignore the given target mode. */
6259 if (tmode != Pmode && tmode != ptr_mode)
6260 tmode = Pmode;
6262 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6263 tmode, modifier);
6265 /* Despite expand_expr claims concerning ignoring TMODE when not
6266 strictly convenient, stuff breaks if we don't honor it. Note
6267 that combined with the above, we only do this for pointer modes. */
6268 rmode = GET_MODE (result);
6269 if (rmode == VOIDmode)
6272 result = convert_memory_address (tmode, result);
6278 /* expand_expr: generate code for computing expression EXP.
6279 An rtx for the computed value is returned. The value is never null.
6280 In the case of a void EXP, const0_rtx is returned.
6282 The value may be stored in TARGET if TARGET is nonzero.
6283 TARGET is just a suggestion; callers must assume that
6284 the rtx returned may not be the same as TARGET.
6286 If TARGET is CONST0_RTX, it means that the value will be ignored.
6288 If TMODE is not VOIDmode, it suggests generating the
6289 result in mode TMODE. But this is done only when convenient.
6290 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6291 TMODE is just a suggestion; callers must assume that
6292 the rtx returned may not have mode TMODE.
6294 Note that TARGET may have neither TMODE nor MODE. In that case, it
6295 probably will not be used.
6297 If MODIFIER is EXPAND_SUM then when EXP is an addition
6298 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6299 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6300 products as above, or REG or MEM, or constant.
6301 Ordinarily in such cases we would output mul or add instructions
6302 and then return a pseudo reg containing the sum.
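For example (illustrative), when expanding the address arithmetic for
a[i] with 4-byte elements, EXPAND_SUM may return
(plus (mult (reg i') (const_int 4)) (symbol_ref a))
instead of emitting the multiply and add insns.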
6304 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6305 it also marks a label as absolutely required (it can't be dead).
6306 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6307 This is used for outputting expressions used in initializers.
6309 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6310 with a constant address even if that address is not normally legitimate.
6311 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6313 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6314 a call parameter. Such targets require special care as we haven't yet
6315 marked TARGET so that it's safe from being trashed by libcalls. We
6316 don't want to use TARGET for anything but the final result;
6317 intermediate values must go elsewhere. Additionally, calls to
6318 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6320 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6321 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6322 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6323 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6326 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6327 enum expand_modifier, rtx *);
6330 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6331 enum expand_modifier modifier, rtx *alt_rtl)
6334 rtx ret, last = NULL;
6336 /* Handle ERROR_MARK before anybody tries to access its type. */
6337 if (TREE_CODE (exp) == ERROR_MARK
6338 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6340 ret = CONST0_RTX (tmode);
6341 return ret ? ret : const0_rtx;
6344 if (flag_non_call_exceptions)
6346 rn = lookup_stmt_eh_region (exp);
6347 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6349 last = get_last_insn ();
6352 /* If this is an expression of some kind and it has an associated line
6353 number, then emit the line number before expanding the expression.
6355 We need to save and restore the file and line information so that
6356 errors discovered during expansion are emitted with the right
6357 information. It would be better if the diagnostic routines used the
6358 file/line information embedded in the tree nodes rather than globals. */
6360 if (cfun && EXPR_HAS_LOCATION (exp))
6362 location_t saved_location = input_location;
6363 input_location = EXPR_LOCATION (exp);
6364 emit_line_note (input_location);
6366 /* Record where the insns produced belong. */
6367 record_block_change (TREE_BLOCK (exp));
6369 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6371 input_location = saved_location;
6375 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6378 /* If using non-call exceptions, mark all insns that may trap.
6379 expand_call() will mark CALL_INSNs before we get to this code,
6380 but it doesn't handle libcalls, and these may trap. */
6384 for (insn = next_real_insn (last); insn;
6385 insn = next_real_insn (insn))
6387 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6388 /* If we want exceptions for non-call insns, any
6389 may_trap_p instruction may throw. */
6390 && GET_CODE (PATTERN (insn)) != CLOBBER
6391 && GET_CODE (PATTERN (insn)) != USE
6392 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6394 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6404 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6405 enum expand_modifier modifier, rtx *alt_rtl)
6408 tree type = TREE_TYPE (exp);
6410 enum machine_mode mode;
6411 enum tree_code code = TREE_CODE (exp);
6413 rtx subtarget, original_target;
6416 bool reduce_bit_field = false;
6417 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6418 ? reduce_to_bit_field_precision ((expr), \
6423 mode = TYPE_MODE (type);
6424 unsignedp = TYPE_UNSIGNED (type);
6425 if (lang_hooks.reduce_bit_field_operations
6426 && TREE_CODE (type) == INTEGER_TYPE
6427 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6429 /* An operation in what may be a bit-field type needs the
6430 result to be reduced to the precision of the bit-field type,
6431 which is narrower than that of the type's mode. */
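/* For example (illustrative), a 3-bit bit-field type may still have
SImode; its operations are computed in SImode and the results must
then be truncated back to 3 bits of precision.  */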
6432 reduce_bit_field = true;
6433 if (modifier == EXPAND_STACK_PARM)
6437 /* Use subtarget as the target for operand 0 of a binary operation. */
6438 subtarget = get_subtarget (target);
6439 original_target = target;
6440 ignore = (target == const0_rtx
6441 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6442 || code == CONVERT_EXPR || code == COND_EXPR
6443 || code == VIEW_CONVERT_EXPR)
6444 && TREE_CODE (type) == VOID_TYPE));
6446 /* If we are going to ignore this result, we need only do something
6447 if there is a side-effect somewhere in the expression. If there
6448 is, short-circuit the most common cases here. Note that we must
6449 not call expand_expr with anything but const0_rtx in case this
6450 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6454 if (! TREE_SIDE_EFFECTS (exp))
6457 /* Ensure we reference a volatile object even if value is ignored, but
6458 don't do this if all we are doing is taking its address. */
6459 if (TREE_THIS_VOLATILE (exp)
6460 && TREE_CODE (exp) != FUNCTION_DECL
6461 && mode != VOIDmode && mode != BLKmode
6462 && modifier != EXPAND_CONST_ADDRESS)
6464 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6466 temp = copy_to_reg (temp);
6470 if (TREE_CODE_CLASS (code) == tcc_unary
6471 || code == COMPONENT_REF || code == INDIRECT_REF)
6472 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6475 else if (TREE_CODE_CLASS (code) == tcc_binary
6476 || TREE_CODE_CLASS (code) == tcc_comparison
6477 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6479 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6480 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6483 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6484 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6485 /* If the second operand has no side effects, just evaluate the first. */
6487 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6489 else if (code == BIT_FIELD_REF)
6491 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6492 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6493 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6500 /* If we will do cse, generate all results into pseudo registers
6501 since 1) that allows cse to find more things
6502 and 2) otherwise cse could produce an insn the machine
6503 cannot support. An exception is a CONSTRUCTOR into a multi-word
6504 MEM: that's much more likely to be most efficient into the MEM.
6505 Another is a CALL_EXPR which must return in memory. */
6507 if (! cse_not_expected && mode != BLKmode && target
6508 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6509 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6510 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6517 tree function = decl_function_context (exp);
6519 temp = label_rtx (exp);
6520 temp = gen_rtx_LABEL_REF (Pmode, temp);
6522 if (function != current_function_decl
6524 LABEL_REF_NONLOCAL_P (temp) = 1;
6526 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6531 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6536 /* If a static var's type was incomplete when the decl was written,
6537 but the type is complete now, lay out the decl now. */
6538 if (DECL_SIZE (exp) == 0
6539 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6540 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6541 layout_decl (exp, 0);
6543 /* ... fall through ... */
6547 gcc_assert (DECL_RTL (exp));
6549 /* Ensure the variable is marked as used even if it doesn't go through
6550 a parser. If it hasn't been used yet, write out an external definition. */
6552 if (! TREE_USED (exp))
6554 assemble_external (exp);
6555 TREE_USED (exp) = 1;
6558 /* Show we haven't gotten RTL for this yet. */
6561 /* Variables inherited from containing functions should have
6562 been lowered by this point. */
6563 context = decl_function_context (exp);
6564 gcc_assert (!context
6565 || context == current_function_decl
6566 || TREE_STATIC (exp)
6567 /* ??? C++ creates functions that are not TREE_STATIC. */
6568 || TREE_CODE (exp) == FUNCTION_DECL);
6570 /* This is the case of an array whose size is to be determined
6571 from its initializer, while the initializer is still being parsed.
6574 if (MEM_P (DECL_RTL (exp))
6575 && REG_P (XEXP (DECL_RTL (exp), 0)))
6576 temp = validize_mem (DECL_RTL (exp));
6578 /* If DECL_RTL is memory, we are in the normal case and either
6579 the address is not valid or it is not a register and -fforce-addr
6580 is specified, get the address into a register. */
6582 else if (MEM_P (DECL_RTL (exp))
6583 && modifier != EXPAND_CONST_ADDRESS
6584 && modifier != EXPAND_SUM
6585 && modifier != EXPAND_INITIALIZER
6586 && (! memory_address_p (DECL_MODE (exp),
6587 XEXP (DECL_RTL (exp), 0))
6589 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6592 *alt_rtl = DECL_RTL (exp);
6593 temp = replace_equiv_address (DECL_RTL (exp),
6594 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6597 /* If we got something, return it. But first, set the alignment
6598 if the address is a register. */
6601 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6602 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6607 /* If the mode of DECL_RTL does not match that of the decl, it
6608 must be a promoted value. We return a SUBREG of the wanted mode,
6609 but mark it so that we know that it was already extended. */
6611 if (REG_P (DECL_RTL (exp))
6612 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6614 enum machine_mode pmode;
6616 /* Get the signedness used for this variable. Ensure we get the
6617 same mode we got when the variable was declared. */
6618 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6619 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6620 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6622 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6623 SUBREG_PROMOTED_VAR_P (temp) = 1;
6624 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6628 return DECL_RTL (exp);
6631 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6632 TREE_INT_CST_HIGH (exp), mode);
6634 /* ??? If overflow is set, fold will have done an incomplete job,
6635 which can result in (plus xx (const_int 0)), which can get
6636 simplified by validate_replace_rtx during virtual register
6637 instantiation, which can result in unrecognizable insns.
6638 Avoid this by forcing all overflows into registers. */
6639 if (TREE_CONSTANT_OVERFLOW (exp)
6640 && modifier != EXPAND_INITIALIZER)
6641 temp = force_reg (mode, temp);
6646 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6647 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6648 return const_vector_from_tree (exp);
6650 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6651 TREE_VECTOR_CST_ELTS (exp)),
6652 ignore ? const0_rtx : target, tmode, modifier);
6655 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6658 /* If optimized, generate immediate CONST_DOUBLE
6659 which will be turned into memory by reload if necessary.
6661 We used to force a register so that loop.c could see it. But
6662 this does not allow gen_* patterns to perform optimizations with
6663 the constants. It also produces two insns in cases like "x = 1.0;".
6664 On most machines, floating-point constants are not permitted in
6665 many insns, so we'd end up copying it to a register in any case.
6667 Now, we do the copying in expand_binop, if appropriate. */
6668 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6669 TYPE_MODE (TREE_TYPE (exp)));
6672 /* Handle evaluating a complex constant in a CONCAT target. */
6673 if (original_target && GET_CODE (original_target) == CONCAT)
6675 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6678 rtarg = XEXP (original_target, 0);
6679 itarg = XEXP (original_target, 1);
6681 /* Move the real and imaginary parts separately. */
6682 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6683 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6686 emit_move_insn (rtarg, op0);
6688 emit_move_insn (itarg, op1);
6690 return original_target;
6693 /* ... fall through ... */
6696 temp = output_constant_def (exp, 1);
6698 /* temp contains a constant address.
6699 On RISC machines where a constant address isn't valid,
6700 make some insns to get that address into a register. */
6701 if (modifier != EXPAND_CONST_ADDRESS
6702 && modifier != EXPAND_INITIALIZER
6703 && modifier != EXPAND_SUM
6704 && (! memory_address_p (mode, XEXP (temp, 0))
6705 || flag_force_addr))
6706 return replace_equiv_address (temp,
6707 copy_rtx (XEXP (temp, 0)));
6712 tree val = TREE_OPERAND (exp, 0);
6713 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6715 if (!SAVE_EXPR_RESOLVED_P (exp))
6717 /* We can indeed still hit this case, typically via builtin
6718 expanders calling save_expr immediately before expanding
6719 something. Assume this means that we only have to deal
6720 with non-BLKmode values. */
6721 gcc_assert (GET_MODE (ret) != BLKmode);
6723 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6724 DECL_ARTIFICIAL (val) = 1;
6725 DECL_IGNORED_P (val) = 1;
6726 TREE_OPERAND (exp, 0) = val;
6727 SAVE_EXPR_RESOLVED_P (exp) = 1;
6729 if (!CONSTANT_P (ret))
6730 ret = copy_to_reg (ret);
6731 SET_DECL_RTL (val, ret);
6738 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6739 expand_goto (TREE_OPERAND (exp, 0));
6741 expand_computed_goto (TREE_OPERAND (exp, 0));
6745 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6751 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6752 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6757 /* All elts simple constants => refer to a constant in memory. But
6758 if this is a non-BLKmode mode, let it store a field at a time
6759 since that should make a CONST_INT or CONST_DOUBLE when we
6760 fold. Likewise, if we have a target we can use, it is best to
6761 store directly into the target unless the type is large enough
6762 that memcpy will be used. If we are making an initializer and
6763 all operands are constant, put it in memory as well.
6765 FIXME: Avoid trying to fill vector constructors piece-meal.
6766 Output them with output_constant_def below unless we're sure
6767 they're zeros. This should go away when vector initializers
6768 are treated like VECTOR_CST instead of arrays.
6770 else if ((TREE_STATIC (exp)
6771 && ((mode == BLKmode
6772 && ! (target != 0 && safe_from_p (target, exp, 1)))
6773 || TREE_ADDRESSABLE (exp)
6774 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6775 && (! MOVE_BY_PIECES_P
6776 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6778 && ! mostly_zeros_p (exp))))
6779 || ((modifier == EXPAND_INITIALIZER
6780 || modifier == EXPAND_CONST_ADDRESS)
6781 && TREE_CONSTANT (exp)))
6783 rtx constructor = output_constant_def (exp, 1);
6785 if (modifier != EXPAND_CONST_ADDRESS
6786 && modifier != EXPAND_INITIALIZER
6787 && modifier != EXPAND_SUM)
6788 constructor = validize_mem (constructor);
6794 /* Handle calls that pass values in multiple non-contiguous
6795 locations. The Irix 6 ABI has examples of this. */
6796 if (target == 0 || ! safe_from_p (target, exp, 1)
6797 || GET_CODE (target) == PARALLEL
6798 || modifier == EXPAND_STACK_PARM)
6800 = assign_temp (build_qualified_type (type,
6802 | (TREE_READONLY (exp)
6803 * TYPE_QUAL_CONST))),
6804 0, TREE_ADDRESSABLE (exp), 1);
6806 store_constructor (exp, target, 0, int_expr_size (exp));
6810 case MISALIGNED_INDIRECT_REF:
6811 case ALIGN_INDIRECT_REF:
6814 tree exp1 = TREE_OPERAND (exp, 0);
6817 if (code == MISALIGNED_INDIRECT_REF
6818 && !targetm.vectorize.misaligned_mem_ok (mode))
6821 if (modifier != EXPAND_WRITE)
6825 t = fold_read_from_constant_string (exp);
6827 return expand_expr (t, target, tmode, modifier);
6830 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6831 op0 = memory_address (mode, op0);
6833 if (code == ALIGN_INDIRECT_REF)
6835 int align = TYPE_ALIGN_UNIT (type);
6836 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6837 op0 = memory_address (mode, op0);
6840 temp = gen_rtx_MEM (mode, op0);
6842 orig = REF_ORIGINAL (exp);
6845 set_mem_attributes (temp, orig, 0);
6853 tree array = TREE_OPERAND (exp, 0);
6854 tree index = TREE_OPERAND (exp, 1);
6856 /* Fold an expression like: "foo"[2].
6857 This is not done in fold so it won't happen inside &.
6858 Don't fold if this is for wide characters since it's too
6859 difficult to do correctly and this is a very rare case. */
6861 if (modifier != EXPAND_CONST_ADDRESS
6862 && modifier != EXPAND_INITIALIZER
6863 && modifier != EXPAND_MEMORY)
6865 tree t = fold_read_from_constant_string (exp);
6868 return expand_expr (t, target, tmode, modifier);
6871 /* If this is a constant index into a constant array,
6872 just get the value from the array. Handle both the cases when
6873 we have an explicit constructor and when our operand is a variable
6874 that was declared const. */
6876 if (modifier != EXPAND_CONST_ADDRESS
6877 && modifier != EXPAND_INITIALIZER
6878 && modifier != EXPAND_MEMORY
6879 && TREE_CODE (array) == CONSTRUCTOR
6880 && ! TREE_SIDE_EFFECTS (array)
6881 && TREE_CODE (index) == INTEGER_CST)
6885 for (elem = CONSTRUCTOR_ELTS (array);
6886 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6887 elem = TREE_CHAIN (elem))
6890 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6891 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6895 else if (optimize >= 1
6896 && modifier != EXPAND_CONST_ADDRESS
6897 && modifier != EXPAND_INITIALIZER
6898 && modifier != EXPAND_MEMORY
6899 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6900 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6901 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6902 && targetm.binds_local_p (array))
6904 if (TREE_CODE (index) == INTEGER_CST)
6906 tree init = DECL_INITIAL (array);
6908 if (TREE_CODE (init) == CONSTRUCTOR)
6912 for (elem = CONSTRUCTOR_ELTS (init);
6914 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6915 elem = TREE_CHAIN (elem))
6918 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6919 return expand_expr (fold (TREE_VALUE (elem)), target,
6922 else if (TREE_CODE (init) == STRING_CST
6923 && 0 > compare_tree_int (index,
6924 TREE_STRING_LENGTH (init)))
6926 tree type = TREE_TYPE (TREE_TYPE (init));
6927 enum machine_mode mode = TYPE_MODE (type);
6929 if (GET_MODE_CLASS (mode) == MODE_INT
6930 && GET_MODE_SIZE (mode) == 1)
6931 return gen_int_mode (TREE_STRING_POINTER (init)
6932 [TREE_INT_CST_LOW (index)], mode);
6937 goto normal_inner_ref;
6940 /* If the operand is a CONSTRUCTOR, we can just extract the
6941 appropriate field if it is present. */
6942 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6946 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6947 elt = TREE_CHAIN (elt))
6948 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6949 /* We can normally use the value of the field in the
6950 CONSTRUCTOR. However, if this is a bitfield in
6951 an integral mode that we can fit in a HOST_WIDE_INT,
6952 we must mask only the number of bits in the bitfield,
6953 since this is done implicitly by the constructor. If
6954 the bitfield does not meet either of those conditions,
6955 we can't do this optimization. */
6956 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6957 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6959 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6960 <= HOST_BITS_PER_WIDE_INT))))
6962 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6963 && modifier == EXPAND_STACK_PARM)
6965 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6966 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6968 HOST_WIDE_INT bitsize
6969 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6970 enum machine_mode imode
6971 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6973 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6975 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6976 op0 = expand_and (imode, op0, op1, target);
6981 = build_int_cst (NULL_TREE,
6982 GET_MODE_BITSIZE (imode) - bitsize);
6984 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6986 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6994 goto normal_inner_ref;
6997 case ARRAY_RANGE_REF:
7000 enum machine_mode mode1;
7001 HOST_WIDE_INT bitsize, bitpos;
7004 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7005 &mode1, &unsignedp, &volatilep);
7008 /* If we got back the original object, something is wrong. Perhaps
7009 we are evaluating an expression too early. In any event, don't
7010 infinitely recurse. */
7011 gcc_assert (tem != exp);
7013 /* If TEM's type is a union of variable size, pass TARGET to the inner
7014 computation, since it will need a temporary and TARGET is known
7015 to suffice. This occurs in unchecked conversion in Ada. */
7019 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7020 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7022 && modifier != EXPAND_STACK_PARM
7023 ? target : NULL_RTX),
7025 (modifier == EXPAND_INITIALIZER
7026 || modifier == EXPAND_CONST_ADDRESS
7027 || modifier == EXPAND_STACK_PARM)
7028 ? modifier : EXPAND_NORMAL);
7030 /* If this is a constant, put it into a register if it is a
7031 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7032 if (CONSTANT_P (op0))
7034 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7035 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7037 op0 = force_reg (mode, op0);
7039 op0 = validize_mem (force_const_mem (mode, op0));
7042 /* Otherwise, if this object is not in memory and we either have an
7043 offset or a BLKmode result, put it there. This case can't occur in
7044 C, but can in Ada if we have unchecked conversion of an expression
7045 from a scalar type to an array or record type or for an
7046 ARRAY_RANGE_REF whose type is BLKmode. */
7047 else if (!MEM_P (op0)
7049 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7051 tree nt = build_qualified_type (TREE_TYPE (tem),
7052 (TYPE_QUALS (TREE_TYPE (tem))
7053 | TYPE_QUAL_CONST));
7054 rtx memloc = assign_temp (nt, 1, 1, 1);
7056 emit_move_insn (memloc, op0);
7062 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7065 gcc_assert (MEM_P (op0));
7067 #ifdef POINTERS_EXTEND_UNSIGNED
7068 if (GET_MODE (offset_rtx) != Pmode)
7069 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7071 if (GET_MODE (offset_rtx) != ptr_mode)
7072 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7075 if (GET_MODE (op0) == BLKmode
7076 /* A constant address in OP0 can have VOIDmode; we must
7077 not try to call force_reg in that case. */
7078 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7080 && (bitpos % bitsize) == 0
7081 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7082 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7084 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7088 op0 = offset_address (op0, offset_rtx,
7089 highest_pow2_factor (offset));
7092 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7093 record its alignment as BIGGEST_ALIGNMENT. */
7094 if (MEM_P (op0) && bitpos == 0 && offset != 0
7095 && is_aligning_offset (offset, tem))
7096 set_mem_align (op0, BIGGEST_ALIGNMENT);
7098 /* Don't forget about volatility even if this is a bitfield. */
7099 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7101 if (op0 == orig_op0)
7102 op0 = copy_rtx (op0);
7104 MEM_VOLATILE_P (op0) = 1;
7107 /* The following code doesn't handle CONCAT.
7108 Assume only bitpos == 0 can be used for CONCAT, due to
7109 one-element arrays having the same mode as their element. */
7110 if (GET_CODE (op0) == CONCAT)
7112 gcc_assert (bitpos == 0
7113 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7117 /* In cases where an aligned union has an unaligned object
7118 as a field, we might be extracting a BLKmode value from
7119 an integer-mode (e.g., SImode) object. Handle this case
7120 by doing the extract into an object as wide as the field
7121 (which we know to be the width of a basic mode), then
7122 storing into memory, and changing the mode to BLKmode. */
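/* Illustrative example: a 3-byte BLKmode field packed inside a union
whose mode is SImode; we load the containing SImode word, spill it
to a stack slot, and then refer to that slot in BLKmode.  */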
7123 if (mode1 == VOIDmode
7124 || REG_P (op0) || GET_CODE (op0) == SUBREG
7125 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7126 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7127 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7128 && modifier != EXPAND_CONST_ADDRESS
7129 && modifier != EXPAND_INITIALIZER)
7130 /* If the field isn't aligned enough to fetch as a memref,
7131 fetch it as a bit field. */
7132 || (mode1 != BLKmode
7133 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7134 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7136 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7137 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7138 && ((modifier == EXPAND_CONST_ADDRESS
7139 || modifier == EXPAND_INITIALIZER)
7141 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7142 || (bitpos % BITS_PER_UNIT != 0)))
7143 /* If the type and the field are a constant size and the
7144 size of the type isn't the same size as the bitfield,
7145 we must use bitfield operations. */
7147 && TYPE_SIZE (TREE_TYPE (exp))
7148 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7149 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7152 enum machine_mode ext_mode = mode;
7154 if (ext_mode == BLKmode
7155 && ! (target != 0 && MEM_P (op0)
7157 && bitpos % BITS_PER_UNIT == 0))
7158 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7160 if (ext_mode == BLKmode)
7163 target = assign_temp (type, 0, 1, 1);
7168 /* In this case, BITPOS must start at a byte boundary and
7169 TARGET, if specified, must be a MEM. */
7170 gcc_assert (MEM_P (op0)
7171 && (!target || MEM_P (target))
7172 && !(bitpos % BITS_PER_UNIT));
7174 emit_block_move (target,
7175 adjust_address (op0, VOIDmode,
7176 bitpos / BITS_PER_UNIT),
7177 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7179 (modifier == EXPAND_STACK_PARM
7180 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7185 op0 = validize_mem (op0);
7187 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7188 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7190 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7191 (modifier == EXPAND_STACK_PARM
7192 ? NULL_RTX : target),
7193 ext_mode, ext_mode);
7195 /* If the result is a record type and BITSIZE is narrower than
7196 the mode of OP0, an integral mode, and this is a big-endian
7197 machine, we must put the field into the high-order bits. */
7198 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7199 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7200 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7201 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7202 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7206 /* If the result type is BLKmode, store the data into a temporary
7207 of the appropriate type, but with the mode corresponding to the
7208 mode for the data we have (op0's mode). It's tempting to make
7209 this a constant type, since we know it's only being stored once,
7210 but that can cause problems if we are taking the address of this
7211 COMPONENT_REF because the MEM of any reference via that address
7212 will have flags corresponding to the type, which will not
7213 necessarily be constant. */
7214 if (mode == BLKmode)
7217 = assign_stack_temp_for_type
7218 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7220 emit_move_insn (new, op0);
7221 op0 = copy_rtx (new);
7222 PUT_MODE (op0, BLKmode);
7223 set_mem_attributes (op0, exp, 1);
7229 /* If the result is BLKmode, use that to access the object out-of-line. */
7231 if (mode == BLKmode)
7234 /* Get a reference to just this component. */
7235 if (modifier == EXPAND_CONST_ADDRESS
7236 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7237 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7239 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7241 if (op0 == orig_op0)
7242 op0 = copy_rtx (op0);
7244 set_mem_attributes (op0, exp, 0);
7245 if (REG_P (XEXP (op0, 0)))
7246 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7248 MEM_VOLATILE_P (op0) |= volatilep;
7249 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7250 || modifier == EXPAND_CONST_ADDRESS
7251 || modifier == EXPAND_INITIALIZER)
7253 else if (target == 0)
7254 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7256 convert_move (target, op0, unsignedp);
7261 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7264 /* Check for a built-in function. */
7265 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7266 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7268 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7270 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7271 == BUILT_IN_FRONTEND)
7272 return lang_hooks.expand_expr (exp, original_target,
7276 return expand_builtin (exp, target, subtarget, tmode, ignore);
7279 return expand_call (exp, target, ignore);
7281 case NON_LVALUE_EXPR:
7284 if (TREE_OPERAND (exp, 0) == error_mark_node)
7287 if (TREE_CODE (type) == UNION_TYPE)
7289 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7291 /* If both input and output are BLKmode, this conversion isn't doing
7292 anything except possibly changing memory attributes. */
7293 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7295 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7298 result = copy_rtx (result);
7299 set_mem_attributes (result, exp, 0);
7305 if (TYPE_MODE (type) != BLKmode)
7306 target = gen_reg_rtx (TYPE_MODE (type));
7308 target = assign_temp (type, 0, 1, 1);
7312 /* Store data into beginning of memory target. */
7313 store_expr (TREE_OPERAND (exp, 0),
7314 adjust_address (target, TYPE_MODE (valtype), 0),
7315 modifier == EXPAND_STACK_PARM);
7319 gcc_assert (REG_P (target));
7321 /* Store this field into a union of the proper type. */
7322 store_field (target,
7323 MIN ((int_size_in_bytes (TREE_TYPE
7324 (TREE_OPERAND (exp, 0)))
7326 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7327 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7331 /* Return the entire union. */
7335 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7337 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7340 /* If the signedness of the conversion differs and OP0 is
7341 a promoted SUBREG, clear that indication since we now
7342 have to do the proper extension. */
7343 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7344 && GET_CODE (op0) == SUBREG)
7345 SUBREG_PROMOTED_VAR_P (op0) = 0;
7347 return REDUCE_BIT_FIELD (op0);
7350 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7351 if (GET_MODE (op0) == mode)
7354 /* If OP0 is a constant, just convert it into the proper mode. */
7355 else if (CONSTANT_P (op0))
7357 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7358 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7360 if (modifier == EXPAND_INITIALIZER)
7361 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7362 subreg_lowpart_offset (mode,
7365 op0 = convert_modes (mode, inner_mode, op0,
7366 TYPE_UNSIGNED (inner_type));
7369 else if (modifier == EXPAND_INITIALIZER)
7370 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7372 else if (target == 0)
7373 op0 = convert_to_mode (mode, op0,
7374 TYPE_UNSIGNED (TREE_TYPE
7375 (TREE_OPERAND (exp, 0))));
7378 convert_move (target, op0,
7379 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7383 return REDUCE_BIT_FIELD (op0);
7385 case VIEW_CONVERT_EXPR:
7386 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7388 /* If the input and output modes are both the same, we are done.
7389 Otherwise, if neither mode is BLKmode and both are integral and within
7390 a word, we can use gen_lowpart. If neither is true, make sure the
7391 operand is in memory and convert the MEM to the new mode. */
7392 if (TYPE_MODE (type) == GET_MODE (op0))
7394 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7395 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7396 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7397 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7398 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7399 op0 = gen_lowpart (TYPE_MODE (type), op0);
7400 else if (!MEM_P (op0))
7402 /* If the operand is not a MEM, force it into memory. Since we
7403 are going to be changing the mode of the MEM, don't call
7404 force_const_mem for constants because we don't allow pool
7405 constants to change mode. */
7406 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7408 gcc_assert (!TREE_ADDRESSABLE (exp));
7410 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7412 = assign_stack_temp_for_type
7413 (TYPE_MODE (inner_type),
7414 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7416 emit_move_insn (target, op0);
7420 /* At this point, OP0 is in the correct mode. If the output type is such
7421 that the operand is known to be aligned, indicate that it is.
7422 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7426 op0 = copy_rtx (op0);
7428 if (TYPE_ALIGN_OK (type))
7429 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7430 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7431 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7433 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7434 HOST_WIDE_INT temp_size
7435 = MAX (int_size_in_bytes (inner_type),
7436 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7437 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7438 temp_size, 0, type);
7439 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7441 gcc_assert (!TREE_ADDRESSABLE (exp));
7443 if (GET_MODE (op0) == BLKmode)
7444 emit_block_move (new_with_op0_mode, op0,
7445 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7446 (modifier == EXPAND_STACK_PARM
7447 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7449 emit_move_insn (new_with_op0_mode, op0);
7454 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7460 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7461 something else, make sure we add the register to the constant and
7462 then to the other thing. This case can occur during strength
7463 reduction and doing it this way will produce better code if the
7464 frame pointer or argument pointer is eliminated.
7466 fold-const.c will ensure that the constant is always in the inner
7467 PLUS_EXPR, so the only case we need to do anything about is if
7468 sp, ap, or fp is our second argument, in which case we must swap
7469 the innermost first argument and our second argument. */
7471 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7472 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7473 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7474 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7475 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7476 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7478 tree t = TREE_OPERAND (exp, 1);
7480 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7481 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7484 /* If the result is to be ptr_mode and we are adding an integer to
7485 something, we might be forming a constant. So try to use
7486 plus_constant. If it produces a sum and we can't accept it,
7487 use force_operand. This allows P = &ARR[const] to generate
7488 efficient code on machines where a SYMBOL_REF is not a valid address.
7491 If this is an EXPAND_SUM call, always return the sum. */
7492 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7493 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7495 if (modifier == EXPAND_STACK_PARM)
7497 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7498 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7499 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7503 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7505 /* Use immed_double_const to ensure that the constant is
7506 truncated according to the mode of OP1, then sign extended
7507 to a HOST_WIDE_INT. Using the constant directly can result
7508 in non-canonical RTL in a 64x32 cross compile. */
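/* E.g. (illustrative) a 32-bit constant with all bits set must become
(const_int -1) on a 64-bit host; using the low word unmodified would
yield the non-canonical (const_int 0xffffffff).  */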
7510 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7512 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7513 op1 = plus_constant (op1, INTVAL (constant_part));
7514 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7515 op1 = force_operand (op1, target);
7516 return REDUCE_BIT_FIELD (op1);
7519 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7520 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7521 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7525 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7526 (modifier == EXPAND_INITIALIZER
7527 ? EXPAND_INITIALIZER : EXPAND_SUM));
7528 if (! CONSTANT_P (op0))
7530 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7531 VOIDmode, modifier);
7532 /* Return a PLUS if modifier says it's OK. */
7533 if (modifier == EXPAND_SUM
7534 || modifier == EXPAND_INITIALIZER)
7535 return simplify_gen_binary (PLUS, mode, op0, op1);
7538 /* Use immed_double_const to ensure that the constant is
7539 truncated according to the mode of OP1, then sign extended
7540 to a HOST_WIDE_INT. Using the constant directly can result
7541 in non-canonical RTL in a 64x32 cross compile. */
7543 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7545 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7546 op0 = plus_constant (op0, INTVAL (constant_part));
7547 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7548 op0 = force_operand (op0, target);
7549 return REDUCE_BIT_FIELD (op0);
7553 /* No sense saving up arithmetic to be done
7554 if it's all in the wrong mode to form part of an address.
7555 And force_operand won't know whether to sign-extend or zero-extend. */
7557 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7558 || mode != ptr_mode)
7560 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7561 subtarget, &op0, &op1, 0);
7562 if (op0 == const0_rtx)
7564 if (op1 == const0_rtx)
7569 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7570 subtarget, &op0, &op1, modifier);
7571 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7574 /* For initializers, we are allowed to return a MINUS of two
7575 symbolic constants. Here we handle all cases when both operands are constant. */
7577 /* Handle difference of two symbolic constants,
7578 for the sake of an initializer. */
7579 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7580 && really_constant_p (TREE_OPERAND (exp, 0))
7581 && really_constant_p (TREE_OPERAND (exp, 1)))
7583 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7584 NULL_RTX, &op0, &op1, modifier);
7586 /* If the last operand is a CONST_INT, use plus_constant of
7587 the negated constant. Else make the MINUS. */
7588 if (GET_CODE (op1) == CONST_INT)
7589 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7591 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7594 /* No sense saving up arithmetic to be done
7595 if it's all in the wrong mode to form part of an address.
7596 And force_operand won't know whether to sign-extend or zero-extend. */
7598 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7599 || mode != ptr_mode)
7602 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7603 subtarget, &op0, &op1, modifier);
7605 /* Convert A - const to A + (-const). */
7606 if (GET_CODE (op1) == CONST_INT)
7608 op1 = negate_rtx (mode, op1);
7609 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7615 /* If the first operand is constant, swap them.
7616 Thus the following special case checks need only
7617 check the second operand. */
7618 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7620 tree t1 = TREE_OPERAND (exp, 0);
7621 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7622 TREE_OPERAND (exp, 1) = t1;
7625 /* Attempt to return something suitable for generating an
7626 indexed address, for machines that support that. */
7628 if (modifier == EXPAND_SUM && mode == ptr_mode
7629 && host_integerp (TREE_OPERAND (exp, 1), 0))
7631 tree exp1 = TREE_OPERAND (exp, 1);
7633 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7637 op0 = force_operand (op0, NULL_RTX);
7639 op0 = copy_to_mode_reg (mode, op0);
7641 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7642 gen_int_mode (tree_low_cst (exp1, 0),
7643 TYPE_MODE (TREE_TYPE (exp1)))));
7646 if (modifier == EXPAND_STACK_PARM)
7649 /* Check for multiplying things that have been extended
7650 from a narrower type. If this machine supports multiplying
7651 in that narrower type with a result in the desired type,
7652 do it that way, and avoid the explicit type-conversion. */
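/* E.g. (illustrative) (int) (short) a * (int) (short) b can be done
with a HImode-to-SImode widening multiply (the mulhisi3 pattern)
rather than extending both operands to SImode first.  */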
7653 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7654 && TREE_CODE (type) == INTEGER_TYPE
7655 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7656 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7657 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7658 && int_fits_type_p (TREE_OPERAND (exp, 1),
7659 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7660 /* Don't use a widening multiply if a shift will do. */
7661 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7662 > HOST_BITS_PER_WIDE_INT)
7663 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7665 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7666 && (TYPE_PRECISION (TREE_TYPE
7667 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7668 == TYPE_PRECISION (TREE_TYPE
7670 (TREE_OPERAND (exp, 0), 0))))
7671 /* If both operands are extended, they must either both
7672 be zero-extended or both be sign-extended. */
7673 && (TYPE_UNSIGNED (TREE_TYPE
7674 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7675 == TYPE_UNSIGNED (TREE_TYPE
7677 (TREE_OPERAND (exp, 0), 0)))))))
7679 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7680 enum machine_mode innermode = TYPE_MODE (op0type);
7681 bool zextend_p = TYPE_UNSIGNED (op0type);
7682 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7683 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7685 if (mode == GET_MODE_WIDER_MODE (innermode))
7687 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7689 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7690 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7691 TREE_OPERAND (exp, 1),
7692 NULL_RTX, &op0, &op1, 0);
7694 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7695 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7696 NULL_RTX, &op0, &op1, 0);
7699 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7700 && innermode == word_mode)
7703 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7704 NULL_RTX, VOIDmode, 0);
7705 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7706 op1 = convert_modes (innermode, mode,
7707 expand_expr (TREE_OPERAND (exp, 1),
7708 NULL_RTX, VOIDmode, 0),
7711 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7712 NULL_RTX, VOIDmode, 0);
7713 temp = expand_binop (mode, other_optab, op0, op1, target,
7714 unsignedp, OPTAB_LIB_WIDEN);
7715 hipart = gen_highpart (innermode, temp);
7716 htem = expand_mult_highpart_adjust (innermode, hipart,
7720 emit_move_insn (hipart, htem);
7721 return REDUCE_BIT_FIELD (temp);
7725 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7726 subtarget, &op0, &op1, 0);
7727 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7729 case TRUNC_DIV_EXPR:
7730 case FLOOR_DIV_EXPR:
7732 case ROUND_DIV_EXPR:
7733 case EXACT_DIV_EXPR:
7734 if (modifier == EXPAND_STACK_PARM)
7736 /* Possible optimization: compute the dividend with EXPAND_SUM,
7737 then, if the divisor is constant, optimize the case
7738 where some terms of the dividend have coefficients divisible by it. */
7739 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7740 subtarget, &op0, &op1, 0);
7741 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7744 /* Emit a/b as a*(1/b). Later, CSE may manage to eliminate the reciprocal,
7745 saving an expensive divide. If not, combine will rebuild the original computation. */
7747 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7748 && TREE_CODE (type) == REAL_TYPE
7749 && !real_onep (TREE_OPERAND (exp, 0)))
7750 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7751 build2 (RDIV_EXPR, type,
7752 build_real (type, dconst1),
7753 TREE_OPERAND (exp, 1))),
7754 target, tmode, modifier);
7758 case TRUNC_MOD_EXPR:
7759 case FLOOR_MOD_EXPR:
7761 case ROUND_MOD_EXPR:
7762 if (modifier == EXPAND_STACK_PARM)
7764 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7765 subtarget, &op0, &op1, 0);
7766 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7768 case FIX_ROUND_EXPR:
7769 case FIX_FLOOR_EXPR:
7771 gcc_unreachable (); /* Not used for C. */
7773 case FIX_TRUNC_EXPR:
7774 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7775 if (target == 0 || modifier == EXPAND_STACK_PARM)
7776 target = gen_reg_rtx (mode);
7777 expand_fix (target, op0, unsignedp);
7781 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7782 if (target == 0 || modifier == EXPAND_STACK_PARM)
7783 target = gen_reg_rtx (mode);
7784 /* expand_float can't figure out what to do if FROM has VOIDmode.
7785 So give it the correct mode. With -O, cse will optimize this. */
7786 if (GET_MODE (op0) == VOIDmode)
7787 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7789 expand_float (target, op0,
7790 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7794 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7795 if (modifier == EXPAND_STACK_PARM)
7797 temp = expand_unop (mode,
7798 optab_for_tree_code (NEGATE_EXPR, type),
7801 return REDUCE_BIT_FIELD (temp);
7804 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7805 if (modifier == EXPAND_STACK_PARM)
7808 /* ABS_EXPR is not valid for complex arguments. */
7809 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7810 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7812 /* Unsigned abs is simply the operand. Testing here means we don't
7813 risk generating incorrect code below. */
7814 if (TYPE_UNSIGNED (type))
7817 return expand_abs (mode, op0, target, unsignedp,
7818 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7822 target = original_target;
7824 || modifier == EXPAND_STACK_PARM
7825 || (MEM_P (target) && MEM_VOLATILE_P (target))
7826 || GET_MODE (target) != mode
7828 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7829 target = gen_reg_rtx (mode);
7830 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7831 target, &op0, &op1, 0);
7833 /* First try to do it with a special MIN or MAX instruction.
7834 If that does not win, use a conditional jump to select the proper value. */
7836 this_optab = optab_for_tree_code (code, type);
7837 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7842 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7846 target = gen_reg_rtx (mode);
7848 /* If op1 was placed in target, swap op0 and op1. */
7849 if (target != op0 && target == op1)
7857 emit_move_insn (target, op0);
7859 op0 = gen_label_rtx ();
7861 /* If this mode is an integer too wide to compare properly,
7862 compare word by word. Rely on cse to optimize constant cases. */
7863 if (GET_MODE_CLASS (mode) == MODE_INT
7864 && ! can_compare_p (GE, mode, ccp_jump))
7866 if (code == MAX_EXPR)
7867 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7870 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7875 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7876 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7878 emit_move_insn (target, op1);
7883 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7884 if (modifier == EXPAND_STACK_PARM)
7886 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7890 /* ??? Can optimize bitwise operations with one arg constant.
7891 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7892 and (a bitwise1 b) bitwise2 b (etc)
7893 but that is probably not worthwhile. */
7895 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7896 boolean values when we want in all cases to compute both of them. In
7897 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7898 as actual zero-or-1 values and then bitwise anding. In cases where
7899 there cannot be any side effects, better code would be made by
7900 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7901 how to recognize those cases. */
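/* Concretely: "a && b" (TRUTH_ANDIF_EXPR) must not evaluate B when A
is false, while "a & b" on boolean operands (TRUTH_AND_EXPR) always
evaluates both, so it compiles to a plain bitwise AND.  */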
7903 case TRUTH_AND_EXPR:
7904 code = BIT_AND_EXPR;
7909 code = BIT_IOR_EXPR;
7913 case TRUTH_XOR_EXPR:
7914 code = BIT_XOR_EXPR;
7922 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7924 if (modifier == EXPAND_STACK_PARM)
7926 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7927 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7930 /* Could determine the answer when only additive constants differ. Also,
7931 the addition of one can be handled by changing the condition. */
7938 case UNORDERED_EXPR:
7946 temp = do_store_flag (exp,
7947 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7948 tmode != VOIDmode ? tmode : mode, 0);
7952 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7953 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7955 && REG_P (original_target)
7956 && (GET_MODE (original_target)
7957 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7959 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7962 /* If temp is constant, we can just compute the result. */
7963 if (GET_CODE (temp) == CONST_INT)
7965 if (INTVAL (temp) != 0)
7966 emit_move_insn (target, const1_rtx);
7968 emit_move_insn (target, const0_rtx);
7973 if (temp != original_target)
7975 enum machine_mode mode1 = GET_MODE (temp);
7976 if (mode1 == VOIDmode)
7977 mode1 = tmode != VOIDmode ? tmode : mode;
7979 temp = copy_to_mode_reg (mode1, temp);
7982 op1 = gen_label_rtx ();
7983 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7984 GET_MODE (temp), unsignedp, op1);
7985 emit_move_insn (temp, const1_rtx);
7990 /* If no set-flag instruction, must generate a conditional store
7991 into a temporary variable. Drop through and handle this like && and ||. */
7996 || modifier == EXPAND_STACK_PARM
7997 || ! safe_from_p (target, exp, 1)
7998 /* Make sure we don't have a hard reg (such as function's return
7999 value) live across basic blocks, if not optimizing. */
8000 || (!optimize && REG_P (target)
8001 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8002 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8005 emit_move_insn (target, const0_rtx);
8007 op1 = gen_label_rtx ();
8008 jumpifnot (exp, op1);
8011 emit_move_insn (target, const1_rtx);
8014 return ignore ? const0_rtx : target;
8016 case TRUTH_NOT_EXPR:
8017 if (modifier == EXPAND_STACK_PARM)
8019 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8020 /* The parser is careful to generate TRUTH_NOT_EXPR
8021 only with operands that are always zero or one. */
8022 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8023 target, 1, OPTAB_LIB_WIDEN);
8027 case STATEMENT_LIST:
8029 tree_stmt_iterator iter;
8031 gcc_assert (ignore);
8033 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8034 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8039 /* If it's void, we don't need to worry about computing a value. */
8040 if (VOID_TYPE_P (TREE_TYPE (exp)))
8042 tree pred = TREE_OPERAND (exp, 0);
8043 tree then_ = TREE_OPERAND (exp, 1);
8044 tree else_ = TREE_OPERAND (exp, 2);
8046 gcc_assert (TREE_CODE (then_) == GOTO_EXPR
8047 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
8048 && TREE_CODE (else_) == GOTO_EXPR
8049 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
8051 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8052 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8055 /* Note that COND_EXPRs whose type is a structure or union
8056 are required to be constructed to contain assignments of
8057 a temporary variable, so that we can evaluate them here
8058 for side effect only. If type is void, we must do likewise. */
8060 gcc_assert (!TREE_ADDRESSABLE (type)
8062 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
8063 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
8065 /* If we are not to produce a result, we have no target. Otherwise,
8066 if a target was specified use it; it will not be used as an
8067 intermediate target unless it is safe. If no target, use a temporary. */
8070 if (modifier != EXPAND_STACK_PARM
8072 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8073 && GET_MODE (original_target) == mode
8074 #ifdef HAVE_conditional_move
8075 && (! can_conditionally_move_p (mode)
8076 || REG_P (original_target))
8078 && !MEM_P (original_target))
8079 temp = original_target;
8081 temp = assign_temp (type, 0, 0, 1);
8083 do_pending_stack_adjust ();
8085 op0 = gen_label_rtx ();
8086 op1 = gen_label_rtx ();
8087 jumpifnot (TREE_OPERAND (exp, 0), op0);
8088 store_expr (TREE_OPERAND (exp, 1), temp,
8089 modifier == EXPAND_STACK_PARM);
8091 emit_jump_insn (gen_jump (op1));
8094 store_expr (TREE_OPERAND (exp, 2), temp,
8095 modifier == EXPAND_STACK_PARM);
8102 target = expand_vec_cond_expr (exp, target);
8107 tree lhs = TREE_OPERAND (exp, 0);
8108 tree rhs = TREE_OPERAND (exp, 1);
8110 gcc_assert (ignore);
8112 /* Check for |= or &= of a bitfield of size one into another bitfield
8113 of size one. In this case, (unless we need the result of the
8114 assignment) we can do this more efficiently with a
8115 test followed by an assignment, if necessary.
8117 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8118 things change so we do, this code should be enhanced to support it. */
8120 if (TREE_CODE (lhs) == COMPONENT_REF
8121 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8122 || TREE_CODE (rhs) == BIT_AND_EXPR)
8123 && TREE_OPERAND (rhs, 0) == lhs
8124 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8125 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8126 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8128 rtx label = gen_label_rtx ();
8130 do_jump (TREE_OPERAND (rhs, 1),
8131 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8132 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8133 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8134 (TREE_CODE (rhs) == BIT_IOR_EXPR
8136 : integer_zero_node)));
8137 do_pending_stack_adjust ();
8142 expand_assignment (lhs, rhs);
8148 if (!TREE_OPERAND (exp, 0))
8149 expand_null_return ();
8151 expand_return (TREE_OPERAND (exp, 0));
8155 return expand_expr_addr_expr (exp, target, tmode, modifier);
8158 /* Get the rtx values of the operands. */
8159 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8160 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8163 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8165 /* Move the real (op0) and imaginary (op1) parts to their location. */
8166 write_complex_part (target, op0, false);
8167 write_complex_part (target, op1, true);
8172 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8173 return read_complex_part (op0, false);
8176 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8177 return read_complex_part (op0, true);
8180 expand_resx_expr (exp);
    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      gcc_unreachable ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      gcc_unreachable ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except as initialization
	 constants, and should not be expanded.  */
      gcc_unreachable ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;
    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);

    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
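/* Editor's sketch (illustration only, not used by the compiler): a
   plain-C model of the two branches above, assuming a 32-bit int and
   an arithmetic right shift.  The unsigned case masks off the high
   bits; the signed case shifts the field to the top and shifts it
   back so the sign bit of the field is replicated.  */
#if 0
static int
model_reduce_precision (int x, int prec, int is_unsigned)
{
  if (is_unsigned)
    return x & ((1 << prec) - 1);	/* prec == 3: 13 & 7 == 5 */
  /* Shift the field to the top, then arithmetic-shift back so bit
     PREC-1 is replicated into the high bits (sign extension).  */
  return (int) ((unsigned) x << (32 - prec)) >> (32 - prec);
  /* prec == 3: 5 (0b101) becomes -3  */
}
#endif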
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
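/* Editor's note (illustration, not part of GCC): the tree shape matched
   above is the round-up-to-alignment idiom applied to EXP's own
   address, as in the source-level expression

       offset = (-(intptr_t) &exp) & (ALIGN - 1);

   a BIT_AND_EXPR whose first operand is a NEGATE_EXPR of ADDR_EXPR
   <exp> and whose second operand is ALIGN - 1, for ALIGN a power of 2
   above BIGGEST_ALIGNMENT.  Adding that offset to &exp yields the next
   ALIGN-aligned address, which is why the result is known to be more
   aligned than BIGGEST_ALIGNMENT.  */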
/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */
static tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;
  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* The initializer is usable only if the variable is read-only,
	 has no side effects, and binds locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If the variable is bigger than the string literal, OFFSET must
	 be constant and inside the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
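/* Editor's illustration (not part of GCC): given the argument tree for
   "hello" + 1, or for &buf[3] where

       static const char buf[] = "abcdef";

   string_constant returns the STRING_CST ("hello", resp. "abcdef")
   with *PTR_OFFSET set to 1, resp. 3, letting callers such as the
   builtin string expanders fold the access at compile time.  */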
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;
    default:
      gcc_unreachable ();
    }
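  /* Editor's illustration (not part of GCC): for signed operands the
     adjustments above rewrite

	 x <  1   as  x <= 0	    x >  -1  as  x >= 0
	 x <= -1  as  x <  0	    x >=  1  as  x >  0

     so the zero-comparison special cases further down (such as the
     code == LT && integer_zerop (arg1) test) also catch these forms.  */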
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
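  /* Editor's illustration (not part of GCC): for a power-of-2 mask such
     as 8, fold_single_bit_test rewrites

	 (x & 8) != 0   into   (x >> 3) & 1

     and the EQ form additionally XORs the result with 1, so a shift and
     a mask replace a store-flag (scc) instruction.  */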
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
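/* Editor's sketch (illustration only): the set/jump/set fallback in
   do_store_flag above has this shape, shown for INVERT == 0:

       target = 1;
       compare op0, op1;
       if <code> goto label;    // comparison holds: keep the 1
       target = 0;
     label:

   that is, set/compare/jump/set, as the comment that opens the
   sequence says.  */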
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;
  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
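  /* Editor's note (illustration, not part of GCC): the bounds check must
     happen before the truncation.  With a 64-bit index, a value such as
     0x100000002 would truncate to SImode 2 and could wrongly select a
     case label; comparing RANGE against the zero-based index in the
     original, wider mode rejects it before any bits are dropped.  */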
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);
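  /* Editor's worked example (not part of GCC): for case values 5..10,
     INDEX already has 5 subtracted and RANGE is 10 - 5 == 5.  An
     original value of 3 gives INDEX = 3 - 5, which wraps to a huge
     unsigned number, so the single GTU test also sends values below
     the minimum to DEFAULT_LABEL.  */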
  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif
  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
			     convert (index_type, index_expr),
			     convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI when we have no DI
     but do have V2DI, but that case is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
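/* Editor's illustration (not part of GCC): on a target with 64-bit
   integer support but no vector unit, V2DI is still "valid" here; a
   V2DI operation can be open-coded as two DImode operations, one per
   element.  */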
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
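/* Editor's illustration (not part of GCC): for a V4SI VECTOR_CST whose
   element list contains only { 1, 2 }, the first loop above fills
   slots 0 and 1 from the TREE_LIST and the second loop pads slots 2
   and 3 with (const_int 0), yielding (const_vector:V4SI [1 2 0 0]).  */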
#include "gt-expr.h"