/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED      /* If it's last to first.  */
#endif
#endif
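
/* For example (illustrative, not any specific port): a target that
   defines STACK_GROWS_DOWNWARD but not ARGS_GROW_DOWNWARD makes the
   two definedness tests above differ, so PUSH_ARGS_REVERSED is
   defined and argument pushes are emitted from the last argument to
   the first.  */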
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
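
/* Illustration of these predicates (hypothetical numbers, not from
   any particular port): with MOVE_MAX_PIECES == 8 and MOVE_RATIO ==
   15, copying 32 well-aligned bytes costs four DImode moves, so
   move_by_pieces_ninsns returns 4, MOVE_BY_PIECES_P holds, and the
   copy is expanded inline instead of through a libcall.
   CLEAR_BY_PIECES_P and STORE_BY_PIECES_P apply the same insn count
   against CLEAR_RATIO and MOVE_RATIO respectively.  */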
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }
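
  /* Illustration (hypothetical target): if the machine description
     recognizes (set (reg:SF) (mem:SF (reg sp))), the recog calls
     above succeed and direct_load[(int) SFmode] becomes 1, so SFmode
     fields in memory may later be accessed in SFmode directly rather
     than through an integer-mode move.  */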
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
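
/* Usage sketch (DST and SRC stand in for any existing rtx):

     rtx dst = gen_reg_rtx (DImode);
     rtx src = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);

   Here UNSIGNEDP == 1 requests zero-extension of SRC into DST;
   passing 0 would request sign-extension instead.  */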
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
                  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */                     /* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode,
                                             copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
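
/* Example (illustrative): given any QImode rtx X,

     rtx si = convert_to_mode (SImode, x, 0);

   yields an SImode rtx holding the sign-extended value, either by
   reusing part of X in place or by converting into a fresh pseudo.  */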
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x,
               int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
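
  /* Worked instance of the special case (assuming a 32-bit
     HOST_WIDE_INT): converting (const_int -1) whose OLDMODE is an
     8-bit mode to an unsigned 64-bit mode masks VAL down to 0xff and
     passes a zero high-order word to immed_double_const, giving
     0x00000000000000ff rather than the all-ones value gen_lowpart
     would produce.  */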
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
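
/* For instance: with MOVE_MAX_PIECES == 16 on a host whose
   HOST_WIDE_INT is 64 bits, STORE_MAX_PIECES is MIN (16, 16) == 16;
   on a 32-bit-HOST_WIDE_INT host it drops to 8, since a wider
   immediate cannot be represented in two host words.  */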
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
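
/* Typical use (sketch): block-copy expanders ask

     if (can_move_by_pieces (INTVAL (size), align))
       move_by_pieces (dest, src, INTVAL (size), align, 0);

   and fall back to a movmem pattern or a libcall when it fails.  */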
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
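
/* E.g. (illustrative): a mempcpy-style expansion passes ENDP == 1 so
   the returned rtx addresses TO + LEN; an stpcpy-style expansion
   passes ENDP == 2 to get TO + LEN - 1; plain memcpy uses ENDP == 0
   and ignores the return value.  */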
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */
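
/* Example: with wide-enough alignment and MOVE_MAX_PIECES == 8
   (hypothetical), l == 11 decomposes as one 8-byte, one 2-byte and
   one 1-byte move, so the function returns 3.  */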
static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
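
/* Usage sketch (DST and SRC are BLKmode MEMs already set up):

     rtx ret = emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_NORMAL);

   BLOCK_OP_NORMAL permits a memcpy libcall; RET is then memcpy's
   return value, or 0 when the copy was expanded inline.  */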
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */
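
/* The RTL emitted below corresponds to this C sketch (names are
   illustrative only):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter++;
   cmp:
     if (iter < size) goto top;
*/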
static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
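
/* For reference, such a group looks like (register numbers are
   illustrative):

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   pairing each register with its byte offset within the value; slot 0
   may instead be NULL when part of the value lives on the stack.  */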
rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m) && m != BLKmode)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load (dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m) && m != BLKmode)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
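
  /* Worked example: bytes == 5 with UNITS_PER_WORD == 4 (so
     BITS_PER_WORD == 32) and left padding: padding_correction
     = 32 - (5 % 4) * 8 = 24, so extraction below starts 24 bits
     into SRCREG.  */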
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */
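
/* Usage sketch: a memset-style expander supplies a CONSTFUN returning
   the fill value replicated in MODE (GCC's builtins use such a
   callback), then only commits to store_by_pieces when this predicate
   returns nonzero:

     if (can_store_by_pieces (len, constfun, data, align))
       store_by_pieces (dest, len, constfun, data, align, 0);
 */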
2014 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2015 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2016 void *constfundata, unsigned int align)
2018 unsigned HOST_WIDE_INT l;
2019 unsigned int max_size;
2020 HOST_WIDE_INT offset = 0;
2021 enum machine_mode mode, tmode;
2022 enum insn_code icode;
2029 if (! STORE_BY_PIECES_P (len, align))
2032 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2033 if (align >= GET_MODE_ALIGNMENT (tmode))
2034 align = GET_MODE_ALIGNMENT (tmode);
2037 enum machine_mode xmode;
2039 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2041 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2042 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2043 || SLOW_UNALIGNED_ACCESS (tmode, align))
2046 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2049 /* We would first store what we can in the largest integer mode, then go to
2050 successively smaller modes. */
2053 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2058 max_size = STORE_MAX_PIECES + 1;
2059 while (max_size > 1)
2061 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2062 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2063 if (GET_MODE_SIZE (tmode) < max_size)
2066 if (mode == VOIDmode)
2069 icode = mov_optab->handlers[(int) mode].insn_code;
2070 if (icode != CODE_FOR_nothing
2071 && align >= GET_MODE_ALIGNMENT (mode))
2073 unsigned int size = GET_MODE_SIZE (mode);
2080 cst = (*constfun) (constfundata, offset, mode);
2081 if (!LEGITIMATE_CONSTANT_P (cst))
2091 max_size = GET_MODE_SIZE (mode);
2094 /* The code above should have handled everything. */
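/* A hedged sketch of how can_store_by_pieces and store_by_pieces are
   meant to pair up (the reader function and STR are hypothetical; the
   memcpy expander in builtins.c uses essentially this pattern via
   c_readstr):

     static rtx
     read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   and at the call site:

     if (can_store_by_pieces (len, read_str, (void *) str, align))
       store_by_pieces (dest, len, read_str, (void *) str, align, 0);

   so the piecewise expansion is attempted only when every constant
   the reader produces is legitimate for the chosen modes.  */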
2101 /* Generate several move instructions to store LEN bytes generated by
2102 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2103 pointer which will be passed as argument in every CONSTFUN call.
2104 ALIGN is maximum alignment we can assume.
If ENDP is 0 return TO; if ENDP is 1 return memory at the end,
a la mempcpy; and if ENDP is 2 return memory at the end minus
one byte, a la stpcpy.  */
2110 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2111 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2112 void *constfundata, unsigned int align, int endp)
2114 struct store_by_pieces data;
2118 gcc_assert (endp != 2);
2122 gcc_assert (STORE_BY_PIECES_P (len, align));
2123 data.constfun = constfun;
2124 data.constfundata = constfundata;
2127 store_by_pieces_1 (&data, align);
2132 gcc_assert (!data.reverse);
2137 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2138 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2140 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2143 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2150 to1 = adjust_address (data.to, QImode, data.offset);
2158 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2159 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2162 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2164 struct store_by_pieces data;
2169 data.constfun = clear_by_pieces_1;
2170 data.constfundata = NULL;
2173 store_by_pieces_1 (&data, align);
2176 /* Callback routine for clear_by_pieces.
2177 Return const0_rtx unconditionally. */
2180 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2181 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2182 enum machine_mode mode ATTRIBUTE_UNUSED)
2187 /* Subroutine of clear_by_pieces and store_by_pieces.
2188 Generate several move instructions to store LEN bytes of block TO. (A MEM
2189 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2192 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2193 unsigned int align ATTRIBUTE_UNUSED)
2195 rtx to_addr = XEXP (data->to, 0);
2196 unsigned int max_size = STORE_MAX_PIECES + 1;
2197 enum machine_mode mode = VOIDmode, tmode;
2198 enum insn_code icode;
2201 data->to_addr = to_addr;
2203 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2204 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2206 data->explicit_inc_to = 0;
2208 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2210 data->offset = data->len;
2212 /* If storing requires more than two move insns,
2213 copy addresses to registers (to make displacements shorter)
2214 and use post-increment if available. */
2215 if (!data->autinc_to
2216 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2218 /* Determine the main mode we'll be using. */
2219 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2220 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2221 if (GET_MODE_SIZE (tmode) < max_size)
2224 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2226 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2227 data->autinc_to = 1;
2228 data->explicit_inc_to = -1;
2231 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2232 && ! data->autinc_to)
2234 data->to_addr = copy_addr_to_reg (to_addr);
2235 data->autinc_to = 1;
2236 data->explicit_inc_to = 1;
2239 if ( !data->autinc_to && CONSTANT_P (to_addr))
2240 data->to_addr = copy_addr_to_reg (to_addr);
2243 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2244 if (align >= GET_MODE_ALIGNMENT (tmode))
2245 align = GET_MODE_ALIGNMENT (tmode);
2248 enum machine_mode xmode;
2250 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2252 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2253 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2254 || SLOW_UNALIGNED_ACCESS (tmode, align))
2257 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2260 /* First store what we can in the largest integer mode, then go to
2261 successively smaller modes. */
2263 while (max_size > 1)
2265 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2266 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2267 if (GET_MODE_SIZE (tmode) < max_size)
2270 if (mode == VOIDmode)
2273 icode = mov_optab->handlers[(int) mode].insn_code;
2274 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2275 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2277 max_size = GET_MODE_SIZE (mode);
2280 /* The code above should have handled everything. */
2281 gcc_assert (!data->len);
2284 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2285 with move instructions for mode MODE. GENFUN is the gen_... function
2286 to make a move insn for that mode. DATA has all the other info. */
2289 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2290 struct store_by_pieces *data)
2292 unsigned int size = GET_MODE_SIZE (mode);
2295 while (data->len >= size)
2298 data->offset -= size;
2300 if (data->autinc_to)
2301 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2304 to1 = adjust_address (data->to, mode, data->offset);
2306 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2307 emit_insn (gen_add2_insn (data->to_addr,
2308 GEN_INT (-(HOST_WIDE_INT) size)));
2310 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2311 emit_insn ((*genfun) (to1, cst));
2313 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2314 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2316 if (! data->reverse)
2317 data->offset += size;
2323 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2324 its length in bytes. */
2327 clear_storage (rtx object, rtx size)
2330 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2331 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2333 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2334 just move a zero. Otherwise, do this a piece at a time. */
2335 if (GET_MODE (object) != BLKmode
2336 && GET_CODE (size) == CONST_INT
2337 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2338 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2341 if (size == const0_rtx)
2343 else if (GET_CODE (size) == CONST_INT
2344 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2345 clear_by_pieces (object, INTVAL (size), align);
2346 else if (clear_storage_via_clrmem (object, size, align))
2349 retval = clear_storage_via_libcall (object, size);
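/* Usage sketch (the ADDR pseudo and sizes are hypothetical): to zero
   a 64-byte BLKmode object known to be 8-byte aligned,

     rtx mem = gen_rtx_MEM (BLKmode, addr);
     set_mem_align (mem, 64);
     clear_storage (mem, GEN_INT (64));

   which tries clear_by_pieces first, then a clrmem pattern, and
   finally falls back to the memset libcall below.  */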
2355 /* A subroutine of clear_storage. Expand a clrmem pattern;
2356 return true if successful. */
2359 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2361 /* Try the most limited insn first, because there's no point
2362 including more than one in the machine description unless
2363 the more limited one has some advantage. */
2365 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2366 enum machine_mode mode;
2368 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2369 mode = GET_MODE_WIDER_MODE (mode))
2371 enum insn_code code = clrmem_optab[(int) mode];
2372 insn_operand_predicate_fn pred;
2374 if (code != CODE_FOR_nothing
2375 /* We don't need MODE to be narrower than
2376 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2377 the mode mask, as it is returned by the macro, it will
2378 definitely be less than the actual mode mask. */
2379 && ((GET_CODE (size) == CONST_INT
2380 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2381 <= (GET_MODE_MASK (mode) >> 1)))
2382 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2383 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2384 || (*pred) (object, BLKmode))
2385 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2386 || (*pred) (opalign, VOIDmode)))
2389 rtx last = get_last_insn ();
2392 op1 = convert_to_mode (mode, size, 1);
2393 pred = insn_data[(int) code].operand[1].predicate;
2394 if (pred != 0 && ! (*pred) (op1, mode))
2395 op1 = copy_to_mode_reg (mode, op1);
2397 pat = GEN_FCN ((int) code) (object, op1, opalign);
2404 delete_insns_since (last);
2411 /* A subroutine of clear_storage. Expand a call to memset.
2412 Return the return value of memset, 0 otherwise. */
2415 clear_storage_via_libcall (rtx object, rtx size)
2417 tree call_expr, arg_list, fn, object_tree, size_tree;
2418 enum machine_mode size_mode;
/* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
   wrap those pseudos in tree nodes via make_tree and use them later.  */
2424 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2426 size_mode = TYPE_MODE (sizetype);
2427 size = convert_to_mode (size_mode, size, 1);
2428 size = copy_to_mode_reg (size_mode, size);
2430 /* It is incorrect to use the libcall calling conventions to call
2431 memset in this context. This could be a user call to memset and
2432 the user may wish to examine the return value from memset. For
2433 targets where libcalls and normal calls have different conventions
2434 for returning pointers, we could end up generating incorrect code. */
2436 object_tree = make_tree (ptr_type_node, object);
2437 size_tree = make_tree (sizetype, size);
2439 fn = clear_storage_libcall_fn (true);
2440 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2441 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2442 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2444 /* Now we have to build up the CALL_EXPR itself. */
2445 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2446 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2447 call_expr, arg_list, NULL_TREE);
2449 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2454 /* A subroutine of clear_storage_via_libcall. Create the tree node
2455 for the function we use for block clears. The first time FOR_CALL
2456 is true, we call assemble_external. */
2458 static GTY(()) tree block_clear_fn;
2461 init_block_clear_fn (const char *asmspec)
2463 if (!block_clear_fn)
2467 fn = get_identifier ("memset");
2468 args = build_function_type_list (ptr_type_node, ptr_type_node,
2469 integer_type_node, sizetype,
2472 fn = build_decl (FUNCTION_DECL, fn, args);
2473 DECL_EXTERNAL (fn) = 1;
2474 TREE_PUBLIC (fn) = 1;
2475 DECL_ARTIFICIAL (fn) = 1;
2476 TREE_NOTHROW (fn) = 1;
2478 block_clear_fn = fn;
2482 set_user_assembler_name (block_clear_fn, asmspec);
2486 clear_storage_libcall_fn (int for_call)
2488 static bool emitted_extern;
2490 if (!block_clear_fn)
2491 init_block_clear_fn (NULL);
2493 if (for_call && !emitted_extern)
2495 emitted_extern = true;
2496 make_decl_rtl (block_clear_fn);
2497 assemble_external (block_clear_fn);
2500 return block_clear_fn;
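/* For example (the assembler name is hypothetical), a front end that
   renames memset at the assembler level can call

     init_block_clear_fn ("__my_memset");

   once, and later clears expanded through clear_storage_via_libcall
   will be emitted under that name via set_user_assembler_name.  */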
2503 /* Generate code to copy Y into X.
2504 Both Y and X must have the same mode, except that
2505 Y can be a constant with VOIDmode.
2506 This mode cannot be BLKmode; use emit_block_move for that.
2508 Return the last instruction emitted. */
2511 emit_move_insn (rtx x, rtx y)
2513 enum machine_mode mode = GET_MODE (x);
2514 rtx y_cst = NULL_RTX;
2517 gcc_assert (mode != BLKmode
2518 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
2523 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2524 && (last_insn = compress_float_constant (x, y)))
2529 if (!LEGITIMATE_CONSTANT_P (y))
2531 y = force_const_mem (mode, y);
2533 /* If the target's cannot_force_const_mem prevented the spill,
2534 assume that the target's move expanders will also take care
2535 of the non-legitimate constant. */
/* If X or Y are memory references, verify that their addresses are valid
   for the machine.  */
2544 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2545 && ! push_operand (x, GET_MODE (x)))
2547 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2548 x = validize_mem (x);
2551 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2553 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2554 y = validize_mem (y);
2556 gcc_assert (mode != BLKmode);
2558 last_insn = emit_move_insn_1 (x, y);
2560 if (y_cst && REG_P (x)
2561 && (set = single_set (last_insn)) != NULL_RTX
2562 && SET_DEST (set) == x
2563 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2564 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
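/* Minimal usage sketch:

     rtx reg = gen_reg_rtx (DFmode);
     emit_move_insn (reg, CONST1_RTX (DFmode));

   If the constant is not LEGITIMATE_CONSTANT_P, it is spilled to the
   constant pool above, and a REG_EQUAL note recording the original
   constant is attached to the final move.  */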
2569 /* Low level part of emit_move_insn.
2570 Called just like emit_move_insn, but assumes X and Y
2571 are basically valid. */
2574 emit_move_insn_1 (rtx x, rtx y)
2576 enum machine_mode mode = GET_MODE (x);
2577 enum machine_mode submode;
2578 enum mode_class class = GET_MODE_CLASS (mode);
2580 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2582 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2584 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2586 /* Expand complex moves by moving real part and imag part, if possible. */
2587 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2588 && BLKmode != (submode = GET_MODE_INNER (mode))
2589 && (mov_optab->handlers[(int) submode].insn_code
2590 != CODE_FOR_nothing))
2592 /* Don't split destination if it is a stack push. */
2593 int stack = push_operand (x, GET_MODE (x));
2595 #ifdef PUSH_ROUNDING
2596 /* In case we output to the stack, but the size is smaller than the
2597 machine can push exactly, we need to use move instructions. */
2599 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2600 != GET_MODE_SIZE (submode)))
2603 HOST_WIDE_INT offset1, offset2;
2605 /* Do not use anti_adjust_stack, since we don't want to update
2606 stack_pointer_delta. */
2607 temp = expand_binop (Pmode,
2608 #ifdef STACK_GROWS_DOWNWARD
2616 (GET_MODE_SIZE (GET_MODE (x)))),
2617 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2619 if (temp != stack_pointer_rtx)
2620 emit_move_insn (stack_pointer_rtx, temp);
2622 #ifdef STACK_GROWS_DOWNWARD
2624 offset2 = GET_MODE_SIZE (submode);
2626 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2627 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2628 + GET_MODE_SIZE (submode));
2631 emit_move_insn (change_address (x, submode,
2632 gen_rtx_PLUS (Pmode,
2634 GEN_INT (offset1))),
2635 gen_realpart (submode, y));
2636 emit_move_insn (change_address (x, submode,
2637 gen_rtx_PLUS (Pmode,
2639 GEN_INT (offset2))),
2640 gen_imagpart (submode, y));
2644 /* If this is a stack, push the highpart first, so it
2645 will be in the argument order.
2647 In that case, change_address is used only to convert
2648 the mode, not to change the address. */
2651 /* Note that the real part always precedes the imag part in memory
regardless of the machine's endianness.  */
2653 #ifdef STACK_GROWS_DOWNWARD
2654 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2655 gen_imagpart (submode, y));
2656 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2657 gen_realpart (submode, y));
2659 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2660 gen_realpart (submode, y));
2661 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2662 gen_imagpart (submode, y));
2667 rtx realpart_x, realpart_y;
2668 rtx imagpart_x, imagpart_y;
2670 /* If this is a complex value with each part being smaller than a
2671 word, the usual calling sequence will likely pack the pieces into
2672 a single register. Unfortunately, SUBREG of hard registers only
2673 deals in terms of words, so we have a problem converting input
2674 arguments to the CONCAT of two registers that is used elsewhere
2675 for complex values. If this is before reload, we can copy it into
2676 memory and reload. FIXME, we should see about using extract and
2677 insert on integer registers, but complex short and complex char
2678 variables should be rarely used. */
2679 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2680 && (reload_in_progress | reload_completed) == 0)
2683 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2685 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2687 if (packed_dest_p || packed_src_p)
2689 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2690 ? MODE_FLOAT : MODE_INT);
2692 enum machine_mode reg_mode
2693 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2695 if (reg_mode != BLKmode)
2697 rtx mem = assign_stack_temp (reg_mode,
2698 GET_MODE_SIZE (mode), 0);
2699 rtx cmem = adjust_address (mem, mode, 0);
2703 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2705 emit_move_insn_1 (cmem, y);
2706 return emit_move_insn_1 (sreg, mem);
2710 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2712 emit_move_insn_1 (mem, sreg);
2713 return emit_move_insn_1 (x, cmem);
2719 realpart_x = gen_realpart (submode, x);
2720 realpart_y = gen_realpart (submode, y);
2721 imagpart_x = gen_imagpart (submode, x);
2722 imagpart_y = gen_imagpart (submode, y);
2724 /* Show the output dies here. This is necessary for SUBREGs
2725 of pseudos since we cannot track their lifetimes correctly;
2726 hard regs shouldn't appear here except as return values.
2727 We never want to emit such a clobber after reload. */
2729 && ! (reload_in_progress || reload_completed)
2730 && (GET_CODE (realpart_x) == SUBREG
2731 || GET_CODE (imagpart_x) == SUBREG))
2732 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2734 emit_move_insn (realpart_x, realpart_y);
2735 emit_move_insn (imagpart_x, imagpart_y);
2738 return get_last_insn ();
2741 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2742 find a mode to do it in. If we have a movcc, use it. Otherwise,
2743 find the MODE_INT mode of the same width. */
2744 else if (GET_MODE_CLASS (mode) == MODE_CC
2745 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2747 enum insn_code insn_code;
2748 enum machine_mode tmode = VOIDmode;
2752 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2755 for (tmode = QImode; tmode != VOIDmode;
2756 tmode = GET_MODE_WIDER_MODE (tmode))
2757 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2760 gcc_assert (tmode != VOIDmode);
2762 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2763 may call change_address which is not appropriate if we were
2764 called when a reload was in progress. We don't have to worry
2765 about changing the address since the size in bytes is supposed to
2766 be the same. Copy the MEM to change the mode and move any
2767 substitutions from the old MEM to the new one. */
2769 if (reload_in_progress)
2771 x = gen_lowpart_common (tmode, x1);
2772 if (x == 0 && MEM_P (x1))
2774 x = adjust_address_nv (x1, tmode, 0);
2775 copy_replacements (x1, x);
2778 y = gen_lowpart_common (tmode, y1);
2779 if (y == 0 && MEM_P (y1))
2781 y = adjust_address_nv (y1, tmode, 0);
2782 copy_replacements (y1, y);
2787 x = gen_lowpart (tmode, x);
2788 y = gen_lowpart (tmode, y);
2791 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2792 return emit_insn (GEN_FCN (insn_code) (x, y));
2795 /* Try using a move pattern for the corresponding integer mode. This is
2796 only safe when simplify_subreg can convert MODE constants into integer
2797 constants. At present, it can only do this reliably if the value
2798 fits within a HOST_WIDE_INT. */
2799 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2800 && (submode = int_mode_for_mode (mode)) != BLKmode
2801 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2802 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2803 (simplify_gen_subreg (submode, x, mode, 0),
2804 simplify_gen_subreg (submode, y, mode, 0)));
2806 /* This will handle any multi-word or full-word mode that lacks a move_insn
2807 pattern. However, you will get better code if you define such patterns,
2808 even if they must turn into multiple assembler instructions. */
2816 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2818 #ifdef PUSH_ROUNDING
2820 /* If X is a push on the stack, do the push now and replace
2821 X with a reference to the stack pointer. */
2822 if (push_operand (x, GET_MODE (x)))
2827 /* Do not use anti_adjust_stack, since we don't want to update
2828 stack_pointer_delta. */
2829 temp = expand_binop (Pmode,
2830 #ifdef STACK_GROWS_DOWNWARD
2838 (GET_MODE_SIZE (GET_MODE (x)))),
2839 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2841 if (temp != stack_pointer_rtx)
2842 emit_move_insn (stack_pointer_rtx, temp);
2844 code = GET_CODE (XEXP (x, 0));
2846 /* Just hope that small offsets off SP are OK. */
2847 if (code == POST_INC)
2848 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2849 GEN_INT (-((HOST_WIDE_INT)
2850 GET_MODE_SIZE (GET_MODE (x)))));
2851 else if (code == POST_DEC)
2852 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2853 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2855 temp = stack_pointer_rtx;
2857 x = change_address (x, VOIDmode, temp);
2861 /* If we are in reload, see if either operand is a MEM whose address
2862 is scheduled for replacement. */
2863 if (reload_in_progress && MEM_P (x)
2864 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2865 x = replace_equiv_address_nv (x, inner);
2866 if (reload_in_progress && MEM_P (y)
2867 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2868 y = replace_equiv_address_nv (y, inner);
2874 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2877 rtx xpart = operand_subword (x, i, 1, mode);
2878 rtx ypart = operand_subword (y, i, 1, mode);
2880 /* If we can't get a part of Y, put Y into memory if it is a
2881 constant. Otherwise, force it into a register. If we still
2882 can't get a part of Y, abort. */
2883 if (ypart == 0 && CONSTANT_P (y))
2885 y = force_const_mem (mode, y);
2886 ypart = operand_subword (y, i, 1, mode);
2888 else if (ypart == 0)
2889 ypart = operand_subword_force (y, i, mode);
2891 gcc_assert (xpart && ypart);
2893 need_clobber |= (GET_CODE (xpart) == SUBREG);
2895 last_insn = emit_move_insn (xpart, ypart);
2901 /* Show the output dies here. This is necessary for SUBREGs
2902 of pseudos since we cannot track their lifetimes correctly;
2903 hard regs shouldn't appear here except as return values.
2904 We never want to emit such a clobber after reload. */
2906 && ! (reload_in_progress || reload_completed)
2907 && need_clobber != 0)
2908 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2916 /* If Y is representable exactly in a narrower mode, and the target can
2917 perform the extension directly from constant or memory, then emit the
2918 move as an extension. */
2921 compress_float_constant (rtx x, rtx y)
2923 enum machine_mode dstmode = GET_MODE (x);
2924 enum machine_mode orig_srcmode = GET_MODE (y);
2925 enum machine_mode srcmode;
2928 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2930 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2931 srcmode != orig_srcmode;
2932 srcmode = GET_MODE_WIDER_MODE (srcmode))
2935 rtx trunc_y, last_insn;
2937 /* Skip if the target can't extend this way. */
2938 ic = can_extend_p (dstmode, srcmode, 0);
2939 if (ic == CODE_FOR_nothing)
2942 /* Skip if the narrowed value isn't exact. */
2943 if (! exact_real_truncate (srcmode, &r))
2946 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2948 if (LEGITIMATE_CONSTANT_P (trunc_y))
/* Skip if the target needs extra instructions to perform
   the extension.  */
2952 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2955 else if (float_extend_from_mem[dstmode][srcmode])
2956 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2960 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2961 last_insn = get_last_insn ();
2964 set_unique_reg_note (last_insn, REG_EQUAL, y);
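/* A worked example (pattern availability is target-dependent): with
   an extendsfdf2 pattern, moving the DFmode constant 0.5 can be
   emitted as an SFmode load plus a float extension, because

     trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode);

   truncates exactly; a constant like 0.1 fails exact_real_truncate
   and is left in DFmode.  */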
2972 /* Pushing data onto the stack. */
2974 /* Push a block of length SIZE (perhaps variable)
2975 and return an rtx to address the beginning of the block.
2976 The value may be virtual_outgoing_args_rtx.
2978 EXTRA is the number of bytes of padding to push in addition to SIZE.
2979 BELOW nonzero means this padding comes at low addresses;
2980 otherwise, the padding comes at high addresses. */
2983 push_block (rtx size, int extra, int below)
2987 size = convert_modes (Pmode, ptr_mode, size, 1);
2988 if (CONSTANT_P (size))
2989 anti_adjust_stack (plus_constant (size, extra));
2990 else if (REG_P (size) && extra == 0)
2991 anti_adjust_stack (size);
2994 temp = copy_to_mode_reg (Pmode, size);
2996 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2997 temp, 0, OPTAB_LIB_WIDEN);
2998 anti_adjust_stack (temp);
3001 #ifndef STACK_GROWS_DOWNWARD
3007 temp = virtual_outgoing_args_rtx;
3008 if (extra != 0 && below)
3009 temp = plus_constant (temp, extra);
3013 if (GET_CODE (size) == CONST_INT)
3014 temp = plus_constant (virtual_outgoing_args_rtx,
3015 -INTVAL (size) - (below ? 0 : extra));
3016 else if (extra != 0 && !below)
3017 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3018 negate_rtx (Pmode, plus_constant (size, extra)));
3020 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3021 negate_rtx (Pmode, size));
3024 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
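/* For example (sizes are hypothetical):

     rtx block = push_block (GEN_INT (16), 4, 1);

   reserves 20 bytes of stack (16 of data plus 4 of padding; BELOW is
   nonzero, so on a downward-growing stack the padding sits at the
   lower addresses) and returns an address for the beginning of the
   16 data bytes.  */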
3027 #ifdef PUSH_ROUNDING
3029 /* Emit single push insn. */
3032 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3035 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3037 enum insn_code icode;
3038 insn_operand_predicate_fn pred;
3040 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
/* If there is a push pattern, use it.  Otherwise try the old way of
   handing a MEM representing the push operation to the move expander.  */
3043 icode = push_optab->handlers[(int) mode].insn_code;
3044 if (icode != CODE_FOR_nothing)
3046 if (((pred = insn_data[(int) icode].operand[0].predicate)
3047 && !((*pred) (x, mode))))
3048 x = force_reg (mode, x);
3049 emit_insn (GEN_FCN (icode) (x));
3052 if (GET_MODE_SIZE (mode) == rounded_size)
3053 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3054 /* If we are to pad downward, adjust the stack pointer first and
3055 then store X into the stack location using an offset. This is
because emit_move_insn does not know how to pad; it does not have
access to the type.  */
3058 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3060 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3061 HOST_WIDE_INT offset;
3063 emit_move_insn (stack_pointer_rtx,
3064 expand_binop (Pmode,
3065 #ifdef STACK_GROWS_DOWNWARD
3071 GEN_INT (rounded_size),
3072 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3074 offset = (HOST_WIDE_INT) padding_size;
3075 #ifdef STACK_GROWS_DOWNWARD
3076 if (STACK_PUSH_CODE == POST_DEC)
/* We have already decremented the stack pointer, so get the
   previous value.  */
3079 offset += (HOST_WIDE_INT) rounded_size;
3081 if (STACK_PUSH_CODE == POST_INC)
/* We have already incremented the stack pointer, so get the
   previous value.  */
3084 offset -= (HOST_WIDE_INT) rounded_size;
3086 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3090 #ifdef STACK_GROWS_DOWNWARD
3091 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3092 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3093 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3095 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3096 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3097 GEN_INT (rounded_size));
3099 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3102 dest = gen_rtx_MEM (mode, dest_addr);
3106 set_mem_attributes (dest, type, 1);
3108 if (flag_optimize_sibling_calls)
3109 /* Function incoming arguments may overlap with sibling call
3110 outgoing arguments and we cannot allow reordering of reads
3111 from function arguments with stores to outgoing arguments
3112 of sibling calls. */
3113 set_mem_alias_set (dest, 0);
3115 emit_move_insn (dest, x);
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
3123 SIZE is an rtx for the size of data to be copied (in bytes),
3124 needed only if X is BLKmode.
3126 ALIGN (in bits) is maximum alignment we can assume.
3128 If PARTIAL and REG are both nonzero, then copy that many of the first
3129 words of X into registers starting with REG, and push the rest of X.
3130 The amount of space pushed is decreased by PARTIAL words,
3131 rounded *down* to a multiple of PARM_BOUNDARY.
3132 REG must be a hard register in this case.
If REG is zero but PARTIAL is not, take all the other actions one
would take for an argument partially in registers, but do not
actually load any registers.
3137 EXTRA is the amount in bytes of extra space to leave next to this arg.
3138 This is ignored if an argument block has already been allocated.
3140 On a machine that lacks real push insns, ARGS_ADDR is the address of
3141 the bottom of the argument block for this call. We use indexing off there
to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
argument block has not been preallocated.
3145 ARGS_SO_FAR is the size of args previously pushed for this call.
3147 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3148 for arguments passed in registers. If nonzero, it will be the number
3149 of bytes required. */
3152 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3153 unsigned int align, int partial, rtx reg, int extra,
3154 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3158 enum direction stack_direction
3159 #ifdef STACK_GROWS_DOWNWARD
3165 /* Decide where to pad the argument: `downward' for below,
3166 `upward' for above, or `none' for don't pad it.
3167 Default is below for small data on big-endian machines; else above. */
3168 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
/* Invert direction if stack is post-decrement.  */
3172 if (STACK_PUSH_CODE == POST_DEC)
3173 if (where_pad != none)
3174 where_pad = (where_pad == downward ? upward : downward);
3178 if (mode == BLKmode)
3180 /* Copy a block into the stack, entirely or partially. */
3183 int used = partial * UNITS_PER_WORD;
3187 if (reg && GET_CODE (reg) == PARALLEL)
3189 /* Use the size of the elt to compute offset. */
3190 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3191 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3192 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3195 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3201 /* USED is now the # of bytes we need not copy to the stack
3202 because registers will take care of them. */
3205 xinner = adjust_address (xinner, BLKmode, used);
3207 /* If the partial register-part of the arg counts in its stack size,
3208 skip the part of stack space corresponding to the registers.
3209 Otherwise, start copying to the beginning of the stack space,
3210 by setting SKIP to 0. */
3211 skip = (reg_parm_stack_space == 0) ? 0 : used;
3213 #ifdef PUSH_ROUNDING
3214 /* Do it with several push insns if that doesn't take lots of insns
3215 and if there is no difficulty with push insns that skip bytes
3216 on the stack for alignment purposes. */
3219 && GET_CODE (size) == CONST_INT
3221 && MEM_ALIGN (xinner) >= align
3222 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3223 /* Here we avoid the case of a structure whose weak alignment
3224 forces many pushes of a small amount of data,
3225 and such small pushes do rounding that causes trouble. */
3226 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3227 || align >= BIGGEST_ALIGNMENT
3228 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3229 == (align / BITS_PER_UNIT)))
3230 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3232 /* Push padding now if padding above and stack grows down,
3233 or if padding below and stack grows up.
3234 But if space already allocated, this has already been done. */
3235 if (extra && args_addr == 0
3236 && where_pad != none && where_pad != stack_direction)
3237 anti_adjust_stack (GEN_INT (extra));
3239 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3242 #endif /* PUSH_ROUNDING */
3246 /* Otherwise make space on the stack and copy the data
3247 to the address of that space. */
3249 /* Deduct words put into registers from the size we must copy. */
3252 if (GET_CODE (size) == CONST_INT)
3253 size = GEN_INT (INTVAL (size) - used);
3255 size = expand_binop (GET_MODE (size), sub_optab, size,
3256 GEN_INT (used), NULL_RTX, 0,
3260 /* Get the address of the stack space.
3261 In this case, we do not deal with EXTRA separately.
3262 A single stack adjust will do. */
3265 temp = push_block (size, extra, where_pad == downward);
3268 else if (GET_CODE (args_so_far) == CONST_INT)
3269 temp = memory_address (BLKmode,
3270 plus_constant (args_addr,
3271 skip + INTVAL (args_so_far)));
3273 temp = memory_address (BLKmode,
3274 plus_constant (gen_rtx_PLUS (Pmode,
3279 if (!ACCUMULATE_OUTGOING_ARGS)
3281 /* If the source is referenced relative to the stack pointer,
3282 copy it to another register to stabilize it. We do not need
3283 to do this if we know that we won't be changing sp. */
3285 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3286 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3287 temp = copy_to_reg (temp);
3290 target = gen_rtx_MEM (BLKmode, temp);
3292 /* We do *not* set_mem_attributes here, because incoming arguments
3293 may overlap with sibling call outgoing arguments and we cannot
3294 allow reordering of reads from function arguments with stores
3295 to outgoing arguments of sibling calls. We do, however, want
3296 to record the alignment of the stack slot. */
3297 /* ALIGN may well be better aligned than TYPE, e.g. due to
3298 PARM_BOUNDARY. Assume the caller isn't lying. */
3299 set_mem_align (target, align);
3301 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3304 else if (partial > 0)
3306 /* Scalar partly in registers. */
3308 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3311 /* # words of start of argument
3312 that we must make space for but need not store. */
3313 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3314 int args_offset = INTVAL (args_so_far);
3317 /* Push padding now if padding above and stack grows down,
3318 or if padding below and stack grows up.
3319 But if space already allocated, this has already been done. */
3320 if (extra && args_addr == 0
3321 && where_pad != none && where_pad != stack_direction)
3322 anti_adjust_stack (GEN_INT (extra));
3324 /* If we make space by pushing it, we might as well push
3325 the real data. Otherwise, we can leave OFFSET nonzero
3326 and leave the space uninitialized. */
3330 /* Now NOT_STACK gets the number of words that we don't need to
3331 allocate on the stack. */
3332 not_stack = partial - offset;
3334 /* If the partial register-part of the arg counts in its stack size,
3335 skip the part of stack space corresponding to the registers.
3336 Otherwise, start copying to the beginning of the stack space,
3337 by setting SKIP to 0. */
3338 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3340 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3341 x = validize_mem (force_const_mem (mode, x));
3343 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3344 SUBREGs of such registers are not allowed. */
3345 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3346 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3347 x = copy_to_reg (x);
3349 /* Loop over all the words allocated on the stack for this arg. */
3350 /* We can do it by words, because any scalar bigger than a word
3351 has a size a multiple of a word. */
3352 #ifndef PUSH_ARGS_REVERSED
3353 for (i = not_stack; i < size; i++)
3355 for (i = size - 1; i >= not_stack; i--)
3357 if (i >= not_stack + offset)
3358 emit_push_insn (operand_subword_force (x, i, mode),
3359 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3361 GEN_INT (args_offset + ((i - not_stack + skip)
3363 reg_parm_stack_space, alignment_pad);
3370 /* Push padding now if padding above and stack grows down,
3371 or if padding below and stack grows up.
3372 But if space already allocated, this has already been done. */
3373 if (extra && args_addr == 0
3374 && where_pad != none && where_pad != stack_direction)
3375 anti_adjust_stack (GEN_INT (extra));
3377 #ifdef PUSH_ROUNDING
3378 if (args_addr == 0 && PUSH_ARGS)
3379 emit_single_push_insn (mode, x, type);
3383 if (GET_CODE (args_so_far) == CONST_INT)
3385 = memory_address (mode,
3386 plus_constant (args_addr,
3387 INTVAL (args_so_far)));
3389 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3391 dest = gen_rtx_MEM (mode, addr);
3393 /* We do *not* set_mem_attributes here, because incoming arguments
3394 may overlap with sibling call outgoing arguments and we cannot
3395 allow reordering of reads from function arguments with stores
3396 to outgoing arguments of sibling calls. We do, however, want
3397 to record the alignment of the stack slot. */
3398 /* ALIGN may well be better aligned than TYPE, e.g. due to
3399 PARM_BOUNDARY. Assume the caller isn't lying. */
3400 set_mem_align (dest, align);
3402 emit_move_insn (dest, x);
3406 /* If part should go in registers, copy that part
3407 into the appropriate registers. Do this now, at the end,
3408 since mem-to-mem copies above may do function calls. */
3409 if (partial > 0 && reg != 0)
3411 /* Handle calls that pass values in multiple non-contiguous locations.
3412 The Irix 6 ABI has examples of this. */
3413 if (GET_CODE (reg) == PARALLEL)
3414 emit_group_load (reg, x, type, -1);
3416 move_block_to_reg (REGNO (reg), x, partial, mode);
3419 if (extra && args_addr == 0 && where_pad == stack_direction)
3420 anti_adjust_stack (GEN_INT (extra));
3422 if (alignment_pad && args_addr == 0)
3423 anti_adjust_stack (alignment_pad);
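/* A hedged sketch of the simplest case -- pushing one word-sized
   scalar with real push insns, no preallocated argument block and
   nothing passed in registers:

     emit_push_insn (x, word_mode, integer_type_node, NULL_RTX,
                     BITS_PER_WORD, 0, NULL_RTX, 0, NULL_RTX,
                     const0_rtx, 0, NULL_RTX);

   which on targets defining PUSH_ROUNDING reduces to a single
   emit_single_push_insn call.  */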
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations, or 0 if it cannot.  */
3430 get_subtarget (rtx x)
3434 /* Only registers can be subtargets. */
3436 /* Don't use hard regs to avoid extending their life. */
3437 || REGNO (x) < FIRST_PSEUDO_REGISTER
3441 /* Expand an assignment that stores the value of FROM into TO. */
3444 expand_assignment (tree to, tree from)
3449 /* Don't crash if the lhs of the assignment was erroneous. */
3451 if (TREE_CODE (to) == ERROR_MARK)
3453 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3457 /* Assignment of a structure component needs special treatment
3458 if the structure component's rtx is not simply a MEM.
3459 Assignment of an array element at a constant index, and assignment of
an array element in an unaligned packed structure field, have the same
problem.  */
3463 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3464 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3465 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3467 enum machine_mode mode1;
3468 HOST_WIDE_INT bitsize, bitpos;
3476 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3477 &unsignedp, &volatilep);
3479 /* If we are going to use store_bit_field and extract_bit_field,
3480 make sure to_rtx will be safe for multiple use. */
3482 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3486 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3488 gcc_assert (MEM_P (to_rtx));
3490 #ifdef POINTERS_EXTEND_UNSIGNED
3491 if (GET_MODE (offset_rtx) != Pmode)
3492 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3494 if (GET_MODE (offset_rtx) != ptr_mode)
3495 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
/* A constant address in TO_RTX can have VOIDmode; we must not try
   to call force_reg in that case, so avoid it.  */
3501 && GET_MODE (to_rtx) == BLKmode
3502 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3504 && (bitpos % bitsize) == 0
3505 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3506 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3508 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3512 to_rtx = offset_address (to_rtx, offset_rtx,
3513 highest_pow2_factor_for_target (to,
3519 /* If the field is at offset zero, we could have been given the
3520 DECL_RTX of the parent struct. Don't munge it. */
3521 to_rtx = shallow_copy_rtx (to_rtx);
3523 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3526 /* Deal with volatile and readonly fields. The former is only done
3527 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3528 if (volatilep && MEM_P (to_rtx))
3530 if (to_rtx == orig_to_rtx)
3531 to_rtx = copy_rtx (to_rtx);
3532 MEM_VOLATILE_P (to_rtx) = 1;
3535 if (MEM_P (to_rtx) && ! can_address_p (to))
3537 if (to_rtx == orig_to_rtx)
3538 to_rtx = copy_rtx (to_rtx);
3539 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3542 /* Optimize bitfld op= val in certain cases. */
3543 while (mode1 == VOIDmode
3544 && bitsize > 0 && bitsize < BITS_PER_WORD
3545 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3546 && !TREE_SIDE_EFFECTS (to)
3547 && !TREE_THIS_VOLATILE (to))
3550 rtx value, str_rtx = to_rtx;
3551 HOST_WIDE_INT bitpos1 = bitpos;
3556 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3557 || !BINARY_CLASS_P (src))
3560 op0 = TREE_OPERAND (src, 0);
3561 op1 = TREE_OPERAND (src, 1);
3564 if (! operand_equal_p (to, op0, 0))
3567 if (MEM_P (str_rtx))
3569 enum machine_mode mode = GET_MODE (str_rtx);
3570 HOST_WIDE_INT offset1;
3572 if (GET_MODE_BITSIZE (mode) == 0
3573 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3575 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3577 if (mode == VOIDmode)
3581 bitpos1 %= GET_MODE_BITSIZE (mode);
3582 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3583 str_rtx = adjust_address (str_rtx, mode, offset1);
3585 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3588 /* If the bit field covers the whole REG/MEM, store_field
3589 will likely generate better code. */
3590 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3593 /* We can't handle fields split across multiple entities. */
3594 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3597 if (BYTES_BIG_ENDIAN)
3598 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3601 /* Special case some bitfield op= exp. */
3602 switch (TREE_CODE (src))
3606 /* For now, just optimize the case of the topmost bitfield
3607 where we don't need to do any masking and also
3608 1 bit bitfields where xor can be used.
3609 We might win by one instruction for the other bitfields
3610 too if insv/extv instructions aren't used, so that
3611 can be added later. */
3612 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3613 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3615 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3616 value = convert_modes (GET_MODE (str_rtx),
3617 TYPE_MODE (TREE_TYPE (op1)), value,
3618 TYPE_UNSIGNED (TREE_TYPE (op1)));
3620 /* We may be accessing data outside the field, which means
3621 we can alias adjacent data. */
3622 if (MEM_P (str_rtx))
3624 str_rtx = shallow_copy_rtx (str_rtx);
3625 set_mem_alias_set (str_rtx, 0);
3626 set_mem_expr (str_rtx, 0);
3629 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3631 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3633 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3637 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3638 build_int_cst (NULL_TREE, bitpos1),
3640 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3641 value, str_rtx, 1, OPTAB_WIDEN);
3642 if (result != str_rtx)
3643 emit_move_insn (str_rtx, result);
3655 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3656 TREE_TYPE (tem), get_alias_set (to));
3658 preserve_temp_slots (result);
3662 /* If the value is meaningful, convert RESULT to the proper mode.
3663 Otherwise, return nothing. */
3667 /* If the rhs is a function call and its value is not an aggregate,
3668 call the function before we start to compute the lhs.
3669 This is needed for correct code for cases such as
3670 val = setjmp (buf) on machines where reference to val
3671 requires loading up part of an address in a separate insn.
3673 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3674 since it might be a promoted variable where the zero- or sign- extension
3675 needs to be done. Handling this in the normal way is safe because no
3676 computation is done before the call. */
3677 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3678 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3679 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3680 && REG_P (DECL_RTL (to))))
3685 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3687 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3689 /* Handle calls that return values in multiple non-contiguous locations.
3690 The Irix 6 ABI has examples of this. */
3691 if (GET_CODE (to_rtx) == PARALLEL)
3692 emit_group_load (to_rtx, value, TREE_TYPE (from),
3693 int_size_in_bytes (TREE_TYPE (from)));
3694 else if (GET_MODE (to_rtx) == BLKmode)
3695 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3698 if (POINTER_TYPE_P (TREE_TYPE (to)))
3699 value = convert_memory_address (GET_MODE (to_rtx), value);
3700 emit_move_insn (to_rtx, value);
3702 preserve_temp_slots (to_rtx);
3708 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3709 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3712 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3714 /* Don't move directly into a return register. */
3715 if (TREE_CODE (to) == RESULT_DECL
3716 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3721 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3723 if (GET_CODE (to_rtx) == PARALLEL)
3724 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3725 int_size_in_bytes (TREE_TYPE (from)));
3727 emit_move_insn (to_rtx, temp);
3729 preserve_temp_slots (to_rtx);
3735 /* In case we are returning the contents of an object which overlaps
3736 the place the value is being stored, use a safe function when copying
3737 a value through a pointer into a structure value return block. */
3738 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3739 && current_function_returns_struct
3740 && !current_function_returns_pcc_struct)
3745 size = expr_size (from);
3746 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3748 emit_library_call (memmove_libfunc, LCT_NORMAL,
3749 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3750 XEXP (from_rtx, 0), Pmode,
3751 convert_to_mode (TYPE_MODE (sizetype),
3752 size, TYPE_UNSIGNED (sizetype)),
3753 TYPE_MODE (sizetype));
3755 preserve_temp_slots (to_rtx);
3761 /* Compute FROM and store the value in the rtx we got. */
3764 result = store_expr (from, to_rtx, 0);
3765 preserve_temp_slots (result);
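/* Usage sketch (OBJ, FIELD and RHS are hypothetical trees): a front
   end expanding the C assignment `obj.field = rhs' would build

     tree ref = build3 (COMPONENT_REF, TREE_TYPE (field), obj,
                        field, NULL_TREE);
     expand_assignment (ref, rhs);

   which is routed through the COMPONENT_REF arm above and ends in
   store_field.  */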
3771 /* Generate code for computing expression EXP,
3772 and storing the value into TARGET.
3774 If the mode is BLKmode then we may return TARGET itself.
It turns out that in BLKmode this doesn't cause a problem, because C
has no operators that could combine two different assignments into
the same BLKmode object with different values with no sequence point.
Will other languages need this to be more careful?
3781 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3782 stack, and block moves may need to be treated specially. */
3785 store_expr (tree exp, rtx target, int call_param_p)
3788 rtx alt_rtl = NULL_RTX;
3789 int dont_return_target = 0;
3791 if (VOID_TYPE_P (TREE_TYPE (exp)))
3793 /* C++ can generate ?: expressions with a throw expression in one
3794 branch and an rvalue in the other. Here, we resolve attempts to
3795 store the throw expression's nonexistent result. */
3796 gcc_assert (!call_param_p);
3797 expand_expr (exp, const0_rtx, VOIDmode, 0);
3800 if (TREE_CODE (exp) == COMPOUND_EXPR)
/* Perform the first part of the compound expression, then assign from
   the second part.  */
3804 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3805 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3806 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3808 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3810 /* For conditional expression, get safe form of the target. Then
3811 test the condition, doing the appropriate assignment on either
3812 side. This avoids the creation of unnecessary temporaries.
3813 For non-BLKmode, it is more efficient not to do this. */
3815 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3817 do_pending_stack_adjust ();
3819 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3820 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3821 emit_jump_insn (gen_jump (lab2));
3824 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
3830 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3831 /* If this is a scalar in a register that is stored in a wider mode
3832 than the declared mode, compute the result into its declared mode
and then convert to the wider mode.  Our value is the computed
expression.  */
3836 rtx inner_target = 0;
3838 /* We can do the conversion inside EXP, which will often result
3839 in some optimizations. Do the conversion in two steps: first
3840 change the signedness, if needed, then the extend. But don't
3841 do this if the type of EXP is a subtype of something else
3842 since then the conversion might involve more than just
3843 converting modes. */
3844 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
3845 && TREE_TYPE (TREE_TYPE (exp)) == 0
3846 && (!lang_hooks.reduce_bit_field_operations
3847 || (GET_MODE_PRECISION (GET_MODE (target))
3848 == TYPE_PRECISION (TREE_TYPE (exp)))))
3850 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3851 != SUBREG_PROMOTED_UNSIGNED_P (target))
3853 (lang_hooks.types.signed_or_unsigned_type
3854 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3856 exp = convert (lang_hooks.types.type_for_mode
3857 (GET_MODE (SUBREG_REG (target)),
3858 SUBREG_PROMOTED_UNSIGNED_P (target)),
3861 inner_target = SUBREG_REG (target);
3864 temp = expand_expr (exp, inner_target, VOIDmode,
3865 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3867 /* If TEMP is a VOIDmode constant, use convert_modes to make
3868 sure that we properly convert it. */
3869 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3871 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3872 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3873 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3874 GET_MODE (target), temp,
3875 SUBREG_PROMOTED_UNSIGNED_P (target));
3878 convert_move (SUBREG_REG (target), temp,
3879 SUBREG_PROMOTED_UNSIGNED_P (target));
3885 temp = expand_expr_real (exp, target, GET_MODE (target),
3887 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3889 /* Return TARGET if it's a specified hardware register.
3890 If TARGET is a volatile mem ref, either return TARGET
3891 or return a reg copied *from* TARGET; ANSI requires this.
3893 Otherwise, if TEMP is not TARGET, return TEMP
3894 if it is constant (for efficiency),
3895 or if we really want the correct value. */
3896 if (!(target && REG_P (target)
3897 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3898 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3899 && ! rtx_equal_p (temp, target)
3900 && CONSTANT_P (temp))
3901 dont_return_target = 1;
3904 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3905 the same as that of TARGET, adjust the constant. This is needed, for
example, in case it is a CONST_DOUBLE and we want only a word-sized
value.  */
3908 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3909 && TREE_CODE (exp) != ERROR_MARK
3910 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3911 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3912 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3914 /* If value was not generated in the target, store it there.
3915 Convert the value to TARGET's type first if necessary and emit the
3916 pending incrementations that have been queued when expanding EXP.
3917 Note that we cannot emit the whole queue blindly because this will
3918 effectively disable the POST_INC optimization later.
3920 If TEMP and TARGET compare equal according to rtx_equal_p, but
3921 one or both of them are volatile memory refs, we have to distinguish
3923 - expand_expr has used TARGET. In this case, we must not generate
another copy.  This can be detected by TARGET being equal according
to ==.
3926 - expand_expr has not used TARGET - that means that the source just
3927 happens to have the same RTX form. Since temp will have been created
by expand_expr, it will compare unequal according to ==.
3929 We must generate a copy in this case, to reach the correct number
3930 of volatile memory references. */
3932 if ((! rtx_equal_p (temp, target)
3933 || (temp != target && (side_effects_p (temp)
3934 || side_effects_p (target))))
3935 && TREE_CODE (exp) != ERROR_MARK
3936 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3937 but TARGET is not valid memory reference, TEMP will differ
3938 from TARGET although it is really the same location. */
3939 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
3940 /* If there's nothing to copy, don't bother. Don't call expr_size
unless necessary, because some front ends' (C++) expr_size hook
aborts on objects that are not supposed to be bit-copied or
can be deleted.  */
3944 && expr_size (exp) != const0_rtx)
3946 if (GET_MODE (temp) != GET_MODE (target)
3947 && GET_MODE (temp) != VOIDmode)
3949 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3950 if (dont_return_target)
3952 /* In this case, we will return TEMP,
3953 so make sure it has the proper mode.
3954 But don't forget to store the value into TARGET. */
3955 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3956 emit_move_insn (target, temp);
3959 convert_move (target, temp, unsignedp);
3962 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3964 /* Handle copying a string constant into an array. The string
3965 constant may be shorter than the array. So copy just the string's
3966 actual length, and clear the rest. First get the size of the data
3967 type of the string, which is actually the size of the target. */
3968 rtx size = expr_size (exp);
3970 if (GET_CODE (size) == CONST_INT
3971 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3972 emit_block_move (target, temp, size,
3974 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3977 /* Compute the size of the data to copy from the string. */
3979 = size_binop (MIN_EXPR,
3980 make_tree (sizetype, size),
3981 size_int (TREE_STRING_LENGTH (exp)));
3983 = expand_expr (copy_size, NULL_RTX, VOIDmode,
3985 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
3988 /* Copy that much. */
3989 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
3990 TYPE_UNSIGNED (sizetype));
3991 emit_block_move (target, temp, copy_size_rtx,
3993 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3995 /* Figure out how much is left in TARGET that we have to clear.
3996 Do all calculations in ptr_mode. */
3997 if (GET_CODE (copy_size_rtx) == CONST_INT)
3999 size = plus_constant (size, -INTVAL (copy_size_rtx));
4000 target = adjust_address (target, BLKmode,
4001 INTVAL (copy_size_rtx));
4005 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4006 copy_size_rtx, NULL_RTX, 0,
4009 #ifdef POINTERS_EXTEND_UNSIGNED
4010 if (GET_MODE (copy_size_rtx) != Pmode)
4011 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4012 TYPE_UNSIGNED (sizetype));
4015 target = offset_address (target, copy_size_rtx,
4016 highest_pow2_factor (copy_size));
4017 label = gen_label_rtx ();
4018 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4019 GET_MODE (size), 0, label);
4022 if (size != const0_rtx)
4023 clear_storage (target, size);
4029 /* Handle calls that return values in multiple non-contiguous locations.
4030 The Irix 6 ABI has examples of this. */
4031 else if (GET_CODE (target) == PARALLEL)
4032 emit_group_load (target, temp, TREE_TYPE (exp),
4033 int_size_in_bytes (TREE_TYPE (exp)));
4034 else if (GET_MODE (temp) == BLKmode)
4035 emit_block_move (target, temp, expr_size (exp),
4037 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4040 temp = force_operand (temp, target);
4042 emit_move_insn (target, temp);
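/* As a worked example of the STRING_CST path above: storing the
   6-byte string constant "hello" (including its terminating NUL) into
   a char[16] target emits a 6-byte block move followed by
   clear_storage of the remaining 10 bytes, so the tail of the array
   is zeroed as C initializer semantics require.  */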
/* Examine CTOR.  Discover how many scalar fields are set to nonzero
   values and place the count in *P_NZ_ELTS.  Discover how many scalar
   fields are set to non-constant values and place the count in
   *P_NC_ELTS.  */
4054 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4055 HOST_WIDE_INT *p_nc_elts)
4057 HOST_WIDE_INT nz_elts, nc_elts;
4063 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4065 tree value = TREE_VALUE (list);
4066 tree purpose = TREE_PURPOSE (list);
4070 if (TREE_CODE (purpose) == RANGE_EXPR)
4072 tree lo_index = TREE_OPERAND (purpose, 0);
4073 tree hi_index = TREE_OPERAND (purpose, 1);
4075 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4076 mult = (tree_low_cst (hi_index, 1)
4077 - tree_low_cst (lo_index, 1) + 1);
4080 switch (TREE_CODE (value))
4084 HOST_WIDE_INT nz = 0, nc = 0;
4085 categorize_ctor_elements_1 (value, &nz, &nc);
4086 nz_elts += mult * nz;
4087 nc_elts += mult * nc;
4093 if (!initializer_zerop (value))
4097 if (!initializer_zerop (TREE_REALPART (value)))
4099 if (!initializer_zerop (TREE_IMAGPART (value)))
4105 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4106 if (!initializer_zerop (TREE_VALUE (v)))
4113 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4119 *p_nz_elts += nz_elts;
4120 *p_nc_elts += nc_elts;
4124 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4125 HOST_WIDE_INT *p_nc_elts)
4129 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4132 /* Count the number of scalars in TYPE.  Return -1 on overflow or variable size.  */
4136 count_type_elements (tree type)
4138 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4139 switch (TREE_CODE (type))
4143 tree telts = array_type_nelts (type);
4144 if (telts && host_integerp (telts, 1))
4146 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4147 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4150 else if (max / n > m)
4158 HOST_WIDE_INT n = 0, t;
4161 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4162 if (TREE_CODE (f) == FIELD_DECL)
4164 t = count_type_elements (TREE_TYPE (f));
4174 case QUAL_UNION_TYPE:
4176 /* Ho hum. How in the world do we guess here? Clearly it isn't
4177 right to count the fields. Guess based on the number of words. */
4178 HOST_WIDE_INT n = int_size_in_bytes (type);
4181 return n / UNITS_PER_WORD;
4188 return TYPE_VECTOR_SUBPARTS (type);
4197 case REFERENCE_TYPE:
4211 /* Return 1 if EXP contains mostly (3/4) zeros. */
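/* Editorial example: an 8-element array initialized as
   { 0, 0, 0, 0, 0, 0, 0, 1 } has nz_elts == 1 and elts == 8, so
   1 < 8 / 4 holds and it counts as mostly zeros, whereas
   { 0, 1, 0, 1 } gives 2 < 4 / 4, which fails.  */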
4214 mostly_zeros_p (tree exp)
4216 if (TREE_CODE (exp) == CONSTRUCTOR)
4219 HOST_WIDE_INT nz_elts, nc_elts, elts;
4221 /* If there are no ranges of true bits, it is all zero. */
4222 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4223 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4225 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4226 elts = count_type_elements (TREE_TYPE (exp));
4228 return nz_elts < elts / 4;
4231 return initializer_zerop (exp);
4234 /* Helper function for store_constructor.
4235 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4236 TYPE is the type of the CONSTRUCTOR, not the element type.
4237 CLEARED is as for store_constructor.
4238 ALIAS_SET is the alias set to use for any stores.
4240 This provides a recursive shortcut back to store_constructor when it isn't
4241 necessary to go through store_field. This is so that we can pass through
4242 the cleared field to let store_constructor know that we may not have to
4243 clear a substructure if the outer structure has already been cleared. */
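/* Editorial note: the byte-boundary test below means a nested
   aggregate at, say, bit offset 32 with bit size 64 recurses into
   store_constructor directly, while a 3-bit field at bit offset 4
   falls through to store_field and its bit-field machinery.  */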
4246 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4247 HOST_WIDE_INT bitpos, enum machine_mode mode,
4248 tree exp, tree type, int cleared, int alias_set)
4250 if (TREE_CODE (exp) == CONSTRUCTOR
4251 /* We can only call store_constructor recursively if the size and
4252 bit position are on a byte boundary. */
4253 && bitpos % BITS_PER_UNIT == 0
4254 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4255 /* If we have a nonzero bitpos for a register target, then we just
4256 let store_field do the bitfield handling. This is unlikely to
4257 generate unnecessary clear instructions anyway.  */
4258 && (bitpos == 0 || MEM_P (target)))
4262 = adjust_address (target,
4263 GET_MODE (target) == BLKmode
4265 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4266 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4269 /* Update the alias set, if required. */
4270 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4271 && MEM_ALIAS_SET (target) != 0)
4273 target = copy_rtx (target);
4274 set_mem_alias_set (target, alias_set);
4277 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4280 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4283 /* Store the value of constructor EXP into the rtx TARGET.
4284 TARGET is either a REG or a MEM; we know it cannot conflict, since
4285 safe_from_p has been called.
4286 CLEARED is true if TARGET is known to have been zero'd.
4287 SIZE is the number of bytes of TARGET we are allowed to modify: this
4288 may not be the same as the size of EXP if we are assigning to a field
4289 which has been packed to exclude padding bits. */
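/* Editorial overview of the switch below: records and unions walk
   CONSTRUCTOR_ELTS field by field; arrays additionally handle
   RANGE_EXPR indices, unrolling small constant ranges and emitting
   a runtime loop otherwise; vectors try the target's vec_init
   pattern; SET_TYPE constructors fall back on memset/setbits
   libcalls.  In each case a sparse initializer is handled by
   clearing the whole object first and storing only the nonzero
   parts.  */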
4292 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4294 tree type = TREE_TYPE (exp);
4295 #ifdef WORD_REGISTER_OPERATIONS
4296 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4299 switch (TREE_CODE (type))
4303 case QUAL_UNION_TYPE:
4307 /* If size is zero or the target is already cleared, do nothing. */
4308 if (size == 0 || cleared)
4310 /* We either clear the aggregate or indicate the value is dead. */
4311 else if ((TREE_CODE (type) == UNION_TYPE
4312 || TREE_CODE (type) == QUAL_UNION_TYPE)
4313 && ! CONSTRUCTOR_ELTS (exp))
4314 /* If the constructor is empty, clear the union. */
4316 clear_storage (target, expr_size (exp));
4320 /* If we are building a static constructor into a register,
4321 set the initial value as zero so we can fold the value into
4322 a constant. But if more than one register is involved,
4323 this probably loses. */
4324 else if (REG_P (target) && TREE_STATIC (exp)
4325 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4327 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4331 /* If the constructor has fewer fields than the structure or
4332 if we are initializing the structure to mostly zeros, clear
4333 the whole structure first. Don't do this if TARGET is a
4334 register whose mode size isn't equal to SIZE since
4335 clear_storage can't handle this case. */
4337 && ((list_length (CONSTRUCTOR_ELTS (exp))
4338 != fields_length (type))
4339 || mostly_zeros_p (exp))
4341 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4344 clear_storage (target, GEN_INT (size));
4349 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4351 /* Store each element of the constructor into the
4352 corresponding field of TARGET. */
4354 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4356 tree field = TREE_PURPOSE (elt);
4357 tree value = TREE_VALUE (elt);
4358 enum machine_mode mode;
4359 HOST_WIDE_INT bitsize;
4360 HOST_WIDE_INT bitpos = 0;
4362 rtx to_rtx = target;
4364 /* Just ignore missing fields. We cleared the whole
4365 structure, above, if any fields are missing. */
4369 if (cleared && initializer_zerop (value))
4372 if (host_integerp (DECL_SIZE (field), 1))
4373 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4377 mode = DECL_MODE (field);
4378 if (DECL_BIT_FIELD (field))
4381 offset = DECL_FIELD_OFFSET (field);
4382 if (host_integerp (offset, 0)
4383 && host_integerp (bit_position (field), 0))
4385 bitpos = int_bit_position (field);
4389 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4396 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4397 make_tree (TREE_TYPE (exp),
4400 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4401 gcc_assert (MEM_P (to_rtx));
4403 #ifdef POINTERS_EXTEND_UNSIGNED
4404 if (GET_MODE (offset_rtx) != Pmode)
4405 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4407 if (GET_MODE (offset_rtx) != ptr_mode)
4408 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4411 to_rtx = offset_address (to_rtx, offset_rtx,
4412 highest_pow2_factor (offset));
4415 #ifdef WORD_REGISTER_OPERATIONS
4416 /* If this initializes a field that is smaller than a
4417 word, at the start of a word, try to widen it to a full
4418 word. This special case allows us to output C++ member
4419 function initializations in a form that the optimizers can understand.  */
4422 && bitsize < BITS_PER_WORD
4423 && bitpos % BITS_PER_WORD == 0
4424 && GET_MODE_CLASS (mode) == MODE_INT
4425 && TREE_CODE (value) == INTEGER_CST
4427 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4429 tree type = TREE_TYPE (value);
4431 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4433 type = lang_hooks.types.type_for_size
4434 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4435 value = convert (type, value);
4438 if (BYTES_BIG_ENDIAN)
4440 = fold (build2 (LSHIFT_EXPR, type, value,
4441 build_int_cst (NULL_TREE,
4442 BITS_PER_WORD - bitsize)));
4443 bitsize = BITS_PER_WORD;
4448 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4449 && DECL_NONADDRESSABLE_P (field))
4451 to_rtx = copy_rtx (to_rtx);
4452 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4455 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4456 value, type, cleared,
4457 get_alias_set (TREE_TYPE (field)));
4467 tree elttype = TREE_TYPE (type);
4469 HOST_WIDE_INT minelt = 0;
4470 HOST_WIDE_INT maxelt = 0;
4472 domain = TYPE_DOMAIN (type);
4473 const_bounds_p = (TYPE_MIN_VALUE (domain)
4474 && TYPE_MAX_VALUE (domain)
4475 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4476 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4478 /* If we have constant bounds for the range of the type, get them. */
4481 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4482 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4485 /* If the constructor has fewer elements than the array, clear
4486 the whole array first.  Similarly if this is a static
4487 constructor of a non-BLKmode object. */
4490 else if (REG_P (target) && TREE_STATIC (exp))
4494 HOST_WIDE_INT count = 0, zero_count = 0;
4495 need_to_clear = ! const_bounds_p;
4497 /* This loop is a more accurate version of the loop in
4498 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4499 is also needed to check for missing elements. */
4500 for (elt = CONSTRUCTOR_ELTS (exp);
4501 elt != NULL_TREE && ! need_to_clear;
4502 elt = TREE_CHAIN (elt))
4504 tree index = TREE_PURPOSE (elt);
4505 HOST_WIDE_INT this_node_count;
4507 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4509 tree lo_index = TREE_OPERAND (index, 0);
4510 tree hi_index = TREE_OPERAND (index, 1);
4512 if (! host_integerp (lo_index, 1)
4513 || ! host_integerp (hi_index, 1))
4519 this_node_count = (tree_low_cst (hi_index, 1)
4520 - tree_low_cst (lo_index, 1) + 1);
4523 this_node_count = 1;
4525 count += this_node_count;
4526 if (mostly_zeros_p (TREE_VALUE (elt)))
4527 zero_count += this_node_count;
4530 /* Clear the entire array first if there are any missing
4531 elements, or if the incidence of zero elements is >= 75%.  */
4534 && (count < maxelt - minelt + 1
4535 || 4 * zero_count >= 3 * count))
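/* Editorial example: with count == 8 initialized slots of which
   zero_count == 6 are zero, 4 * 6 >= 3 * 8 holds (the 75% test
   written without division), so the array is cleared wholesale
   and only the two nonzero elements are stored individually.  */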
4539 if (need_to_clear && size > 0)
4542 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4544 clear_storage (target, GEN_INT (size));
4548 if (!cleared && REG_P (target))
4549 /* Inform later passes that the old value is dead. */
4550 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4552 /* Store each element of the constructor into the
4553 corresponding element of TARGET, determined by counting the elements.  */
4555 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4557 elt = TREE_CHAIN (elt), i++)
4559 enum machine_mode mode;
4560 HOST_WIDE_INT bitsize;
4561 HOST_WIDE_INT bitpos;
4563 tree value = TREE_VALUE (elt);
4564 tree index = TREE_PURPOSE (elt);
4565 rtx xtarget = target;
4567 if (cleared && initializer_zerop (value))
4570 unsignedp = TYPE_UNSIGNED (elttype);
4571 mode = TYPE_MODE (elttype);
4572 if (mode == BLKmode)
4573 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4574 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4577 bitsize = GET_MODE_BITSIZE (mode);
4579 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4581 tree lo_index = TREE_OPERAND (index, 0);
4582 tree hi_index = TREE_OPERAND (index, 1);
4583 rtx index_r, pos_rtx;
4584 HOST_WIDE_INT lo, hi, count;
4587 /* If the range is constant and "small", unroll the loop. */
4589 && host_integerp (lo_index, 0)
4590 && host_integerp (hi_index, 0)
4591 && (lo = tree_low_cst (lo_index, 0),
4592 hi = tree_low_cst (hi_index, 0),
4593 count = hi - lo + 1,
4596 || (host_integerp (TYPE_SIZE (elttype), 1)
4597 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4600 lo -= minelt; hi -= minelt;
4601 for (; lo <= hi; lo++)
4603 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4606 && !MEM_KEEP_ALIAS_SET_P (target)
4607 && TREE_CODE (type) == ARRAY_TYPE
4608 && TYPE_NONALIASED_COMPONENT (type))
4610 target = copy_rtx (target);
4611 MEM_KEEP_ALIAS_SET_P (target) = 1;
4614 store_constructor_field
4615 (target, bitsize, bitpos, mode, value, type, cleared,
4616 get_alias_set (elttype));
4621 rtx loop_start = gen_label_rtx ();
4622 rtx loop_end = gen_label_rtx ();
4625 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4626 unsignedp = TYPE_UNSIGNED (domain);
4628 index = build_decl (VAR_DECL, NULL_TREE, domain);
4631 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4633 SET_DECL_RTL (index, index_r);
4634 store_expr (lo_index, index_r, 0);
4636 /* Build the head of the loop. */
4637 do_pending_stack_adjust ();
4638 emit_label (loop_start);
4640 /* Assign value to element index. */
4642 = convert (ssizetype,
4643 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4644 index, TYPE_MIN_VALUE (domain))));
4645 position = size_binop (MULT_EXPR, position,
4647 TYPE_SIZE_UNIT (elttype)));
4649 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4650 xtarget = offset_address (target, pos_rtx,
4651 highest_pow2_factor (position));
4652 xtarget = adjust_address (xtarget, mode, 0);
4653 if (TREE_CODE (value) == CONSTRUCTOR)
4654 store_constructor (value, xtarget, cleared,
4655 bitsize / BITS_PER_UNIT);
4657 store_expr (value, xtarget, 0);
4659 /* Generate a conditional jump to exit the loop. */
4660 exit_cond = build2 (LT_EXPR, integer_type_node,
4662 jumpif (exit_cond, loop_end);
4664 /* Update the loop counter, and jump to the head of the loop.  */
4666 expand_assignment (index,
4667 build2 (PLUS_EXPR, TREE_TYPE (index),
4668 index, integer_one_node));
4670 emit_jump (loop_start);
4672 /* Build the end of the loop. */
4673 emit_label (loop_end);
4676 else if ((index != 0 && ! host_integerp (index, 0))
4677 || ! host_integerp (TYPE_SIZE (elttype), 1))
4682 index = ssize_int (1);
4685 index = fold_convert (ssizetype,
4686 fold (build2 (MINUS_EXPR,
4689 TYPE_MIN_VALUE (domain))));
4691 position = size_binop (MULT_EXPR, index,
4693 TYPE_SIZE_UNIT (elttype)));
4694 xtarget = offset_address (target,
4695 expand_expr (position, 0, VOIDmode, 0),
4696 highest_pow2_factor (position));
4697 xtarget = adjust_address (xtarget, mode, 0);
4698 store_expr (value, xtarget, 0);
4703 bitpos = ((tree_low_cst (index, 0) - minelt)
4704 * tree_low_cst (TYPE_SIZE (elttype), 1));
4706 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4708 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4709 && TREE_CODE (type) == ARRAY_TYPE
4710 && TYPE_NONALIASED_COMPONENT (type))
4712 target = copy_rtx (target);
4713 MEM_KEEP_ALIAS_SET_P (target) = 1;
4715 store_constructor_field (target, bitsize, bitpos, mode, value,
4716 type, cleared, get_alias_set (elttype));
4728 tree elttype = TREE_TYPE (type);
4729 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4730 enum machine_mode eltmode = TYPE_MODE (elttype);
4731 HOST_WIDE_INT bitsize;
4732 HOST_WIDE_INT bitpos;
4736 gcc_assert (eltmode != BLKmode);
4738 n_elts = TYPE_VECTOR_SUBPARTS (type);
4739 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4741 enum machine_mode mode = GET_MODE (target);
4743 icode = (int) vec_init_optab->handlers[mode].insn_code;
4744 if (icode != CODE_FOR_nothing)
4748 vector = alloca (n_elts);
4749 for (i = 0; i < n_elts; i++)
4750 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4754 /* If the constructor has fewer elements than the vector,
4755 clear the whole vector first.  Similarly if this is a static
4756 constructor of a non-BLKmode object.  */
4759 else if (REG_P (target) && TREE_STATIC (exp))
4763 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4765 for (elt = CONSTRUCTOR_ELTS (exp);
4767 elt = TREE_CHAIN (elt))
4769 int n_elts_here = tree_low_cst
4770 (int_const_binop (TRUNC_DIV_EXPR,
4771 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4772 TYPE_SIZE (elttype), 0), 1);
4774 count += n_elts_here;
4775 if (mostly_zeros_p (TREE_VALUE (elt)))
4776 zero_count += n_elts_here;
4779 /* Clear the entire vector first if there are any missing elements,
4780 or if the incidence of zero elements is >= 75%. */
4781 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4784 if (need_to_clear && size > 0 && !vector)
4787 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4789 clear_storage (target, GEN_INT (size));
4793 if (!cleared && REG_P (target))
4794 /* Inform later passes that the old value is dead. */
4795 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4797 /* Store each element of the constructor into the corresponding
4798 element of TARGET, determined by counting the elements. */
4799 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4801 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4803 tree value = TREE_VALUE (elt);
4804 tree index = TREE_PURPOSE (elt);
4805 HOST_WIDE_INT eltpos;
4807 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4808 if (cleared && initializer_zerop (value))
4812 eltpos = tree_low_cst (index, 1);
4818 /* Vector CONSTRUCTORs should only be built from smaller
4819 vectors in the case of BLKmode vectors. */
4820 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
4821 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4825 enum machine_mode value_mode =
4826 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4827 ? TYPE_MODE (TREE_TYPE (value))
4829 bitpos = eltpos * elt_size;
4830 store_constructor_field (target, bitsize, bitpos,
4831 value_mode, value, type,
4832 cleared, get_alias_set (elttype));
4837 emit_insn (GEN_FCN (icode)
4839 gen_rtx_PARALLEL (GET_MODE (target),
4840 gen_rtvec_v (n_elts, vector))));
4844 /* Set constructor assignments. */
4847 tree elt = CONSTRUCTOR_ELTS (exp);
4848 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4849 tree domain = TYPE_DOMAIN (type);
4850 tree domain_min, domain_max, bitlength;
4852 /* The default implementation strategy is to extract the
4853 constant parts of the constructor, use that to initialize
4854 the target, and then "or" in whatever non-constant ranges
4855 we need in addition.
4857 If a large set is all zero or all ones, it is probably
4858 better to set it using memset. Also, if a large set has
4859 just a single range, it may also be better to first clear the
4860 whole set (using memset) and then set the bits we want.  */
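/* Editorial example (hypothetical set): a 64-bit set whose only
   initializer is the single range [8 .. 15] has byte-aligned
   bounds, so the code below can clear the object and then memset
   the one byte covered by the range; a range such as [3 .. 10]
   is not byte-aligned and goes through setbits_libfunc instead.  */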
4863 /* Check for all zeros. */
4864 if (elt == NULL_TREE && size > 0)
4867 clear_storage (target, GEN_INT (size));
4871 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4872 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4873 bitlength = size_binop (PLUS_EXPR,
4874 size_diffop (domain_max, domain_min),
4877 nbits = tree_low_cst (bitlength, 1);
4879 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
4880 that are "complicated" (more than one range), initialize
4881 (the constant parts) by copying from a constant. */
4882 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4883 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4885 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4886 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4887 char *bit_buffer = alloca (nbits);
4888 HOST_WIDE_INT word = 0;
4889 unsigned int bit_pos = 0;
4890 unsigned int ibit = 0;
4891 unsigned int offset = 0; /* In bytes from beginning of set. */
4893 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4896 if (bit_buffer[ibit])
4898 if (BYTES_BIG_ENDIAN)
4899 word |= (1 << (set_word_size - 1 - bit_pos));
4901 word |= 1 << bit_pos;
4905 if (bit_pos >= set_word_size || ibit == nbits)
4907 if (word != 0 || ! cleared)
4909 rtx datum = gen_int_mode (word, mode);
4912 /* The assumption here is that it is safe to
4913 use XEXP if the set is multi-word, but not
4914 if it's single-word. */
4916 to_rtx = adjust_address (target, mode, offset);
4919 gcc_assert (!offset);
4922 emit_move_insn (to_rtx, datum);
4929 offset += set_word_size / BITS_PER_UNIT;
4934 /* Don't bother clearing storage if the set is all ones. */
4935 if (TREE_CHAIN (elt) != NULL_TREE
4936 || (TREE_PURPOSE (elt) == NULL_TREE
4938 : ( ! host_integerp (TREE_VALUE (elt), 0)
4939 || ! host_integerp (TREE_PURPOSE (elt), 0)
4940 || (tree_low_cst (TREE_VALUE (elt), 0)
4941 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4942 != (HOST_WIDE_INT) nbits))))
4943 clear_storage (target, expr_size (exp));
4945 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4947 /* Start of range of element or NULL. */
4948 tree startbit = TREE_PURPOSE (elt);
4949 /* End of range of element, or element value. */
4950 tree endbit = TREE_VALUE (elt);
4951 HOST_WIDE_INT startb, endb;
4952 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4954 bitlength_rtx = expand_expr (bitlength,
4955 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4957 /* Handle non-range tuple element like [ expr ]. */
4958 if (startbit == NULL_TREE)
4960 startbit = save_expr (endbit);
4964 startbit = convert (sizetype, startbit);
4965 endbit = convert (sizetype, endbit);
4966 if (! integer_zerop (domain_min))
4968 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4969 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4971 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4972 EXPAND_CONST_ADDRESS);
4973 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4974 EXPAND_CONST_ADDRESS);
4980 ((build_qualified_type (lang_hooks.types.type_for_mode
4981 (GET_MODE (target), 0),
4984 emit_move_insn (targetx, target);
4989 gcc_assert (MEM_P (target));
4993 /* Optimization: If startbit and endbit are constants divisible
4994 by BITS_PER_UNIT, call memset instead. */
4995 if (TREE_CODE (startbit) == INTEGER_CST
4996 && TREE_CODE (endbit) == INTEGER_CST
4997 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
4998 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5000 emit_library_call (memset_libfunc, LCT_NORMAL,
5002 plus_constant (XEXP (targetx, 0),
5003 startb / BITS_PER_UNIT),
5005 constm1_rtx, TYPE_MODE (integer_type_node),
5006 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5007 TYPE_MODE (sizetype));
5010 emit_library_call (setbits_libfunc, LCT_NORMAL,
5011 VOIDmode, 4, XEXP (targetx, 0),
5012 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5013 startbit_rtx, TYPE_MODE (sizetype),
5014 endbit_rtx, TYPE_MODE (sizetype));
5017 emit_move_insn (target, targetx);
5026 /* Store the value of EXP (an expression tree)
5027 into a subfield of TARGET which has mode MODE and occupies
5028 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5029 If MODE is VOIDmode, it means that we are storing into a bit-field.
5031 Always return const0_rtx unless we have something particular to
5034 TYPE is the type of the underlying object,
5036 ALIAS_SET is the alias set for the destination. This value will
5037 (in general) be different from that for TARGET, since TARGET is a
5038 reference to the containing structure. */
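/* Editorial example (assuming a typical little-endian layout): for

     struct S { unsigned a : 3; unsigned f : 5; } s;

   a store to s.f arrives with BITSIZE == 5, BITPOS == 3 and
   MODE == VOIDmode, forcing the store_bit_field path below; an
   ordinary int member would instead arrive with its own mode and a
   byte-aligned BITPOS and take the adjust_address shortcut.  */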
5041 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5042 enum machine_mode mode, tree exp, tree type, int alias_set)
5044 HOST_WIDE_INT width_mask = 0;
5046 if (TREE_CODE (exp) == ERROR_MARK)
5049 /* If we have nothing to store, do nothing unless the expression has side-effects.  */
5052 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5053 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5054 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5056 /* If we are storing into an unaligned field of an aligned union that is
5057 in a register, we may have the mode of TARGET being an integer mode but
5058 MODE == BLKmode. In that case, get an aligned object whose size and
5059 alignment are the same as TARGET and store TARGET into it (we can avoid
5060 the store if the field being stored is the entire width of TARGET). Then
5061 call ourselves recursively to store the field into a BLKmode version of
5062 that object. Finally, load from the object into TARGET. This is not
5063 very efficient in general, but should only be slightly more expensive
5064 than the otherwise-required unaligned accesses. Perhaps this can be
5065 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5066 twice, once with emit_move_insn and once via store_field. */
5069 && (REG_P (target) || GET_CODE (target) == SUBREG))
5071 rtx object = assign_temp (type, 0, 1, 1);
5072 rtx blk_object = adjust_address (object, BLKmode, 0);
5074 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5075 emit_move_insn (object, target);
5077 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5079 emit_move_insn (target, object);
5081 /* We want to return the BLKmode version of the data. */
5085 if (GET_CODE (target) == CONCAT)
5087 /* We're storing into a struct containing a single __complex. */
5089 gcc_assert (!bitpos);
5090 return store_expr (exp, target, 0);
5093 /* If the structure is in a register or if the component
5094 is a bit field, we cannot use addressing to access it.
5095 Use bit-field techniques or SUBREG to store in it. */
5097 if (mode == VOIDmode
5098 || (mode != BLKmode && ! direct_store[(int) mode]
5099 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5100 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5102 || GET_CODE (target) == SUBREG
5103 /* If the field isn't aligned enough to store as an ordinary memref,
5104 store it as a bit field. */
5106 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5107 || bitpos % GET_MODE_ALIGNMENT (mode))
5108 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5109 || (bitpos % BITS_PER_UNIT != 0)))
5110 /* If the RHS and field are a constant size and the size of the
5111 RHS isn't the same size as the bitfield, we must use bitfield operations.  */
5114 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5115 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5117 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5119 /* If BITSIZE is narrower than the size of the type of EXP
5120 we will be narrowing TEMP. Normally, what's wanted are the
5121 low-order bits.  However, if EXP's type is a record and this is a
5122 big-endian machine, we want the upper BITSIZE bits. */
5123 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5124 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5125 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5126 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5127 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5131 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
5133 if (mode != VOIDmode && mode != BLKmode
5134 && mode != TYPE_MODE (TREE_TYPE (exp)))
5135 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5137 /* If the modes of TARGET and TEMP are both BLKmode, both
5138 must be in memory and BITPOS must be aligned on a byte
5139 boundary. If so, we simply do a block copy. */
5140 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5142 gcc_assert (MEM_P (target) && MEM_P (temp)
5143 && !(bitpos % BITS_PER_UNIT));
5145 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5146 emit_block_move (target, temp,
5147 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5154 /* Store the value in the bitfield. */
5155 store_bit_field (target, bitsize, bitpos, mode, temp);
5161 /* Now build a reference to just the desired component. */
5162 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5164 if (to_rtx == target)
5165 to_rtx = copy_rtx (to_rtx);
5167 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5168 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5169 set_mem_alias_set (to_rtx, alias_set);
5171 return store_expr (exp, to_rtx, 0);
5175 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5176 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5177 codes and find the ultimate containing object, which we return.
5179 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5180 bit position, and *PUNSIGNEDP to the signedness of the field.
5181 If the position of the field is variable, we store a tree
5182 giving the variable offset (in units) in *POFFSET.
5183 This offset is in addition to the bit position.
5184 If the position is not variable, we store 0 in *POFFSET.
5186 If any of the extraction expressions is volatile,
5187 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5189 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5190 is a mode that can be used to access the field.  In that case, *PBITSIZE is redundant.
5193 If the field describes a variable-sized object, *PMODE is set to
5194 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5195 this case, but the address of the object can be found. */
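/* Editorial example (assuming a typical 32-bit layout): for s.f in

     struct S { int a; unsigned f : 3; } s;

   this returns the reference to s and sets *PBITSIZE == 3,
   *PBITPOS == 32, *POFFSET == 0 and *PMODE == VOIDmode, since f is
   a bit-field.  */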
5198 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5199 HOST_WIDE_INT *pbitpos, tree *poffset,
5200 enum machine_mode *pmode, int *punsignedp,
5204 enum machine_mode mode = VOIDmode;
5205 tree offset = size_zero_node;
5206 tree bit_offset = bitsize_zero_node;
5209 /* First get the mode, signedness, and size. We do this from just the
5210 outermost expression. */
5211 if (TREE_CODE (exp) == COMPONENT_REF)
5213 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5214 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5215 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5217 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5219 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5221 size_tree = TREE_OPERAND (exp, 1);
5222 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5226 mode = TYPE_MODE (TREE_TYPE (exp));
5227 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5229 if (mode == BLKmode)
5230 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5232 *pbitsize = GET_MODE_BITSIZE (mode);
5237 if (! host_integerp (size_tree, 1))
5238 mode = BLKmode, *pbitsize = -1;
5240 *pbitsize = tree_low_cst (size_tree, 1);
5243 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5244 and find the ultimate containing object. */
5247 if (TREE_CODE (exp) == BIT_FIELD_REF)
5248 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5249 else if (TREE_CODE (exp) == COMPONENT_REF)
5251 tree field = TREE_OPERAND (exp, 1);
5252 tree this_offset = component_ref_field_offset (exp);
5254 /* If this field hasn't been filled in yet, don't go
5255 past it. This should only happen when folding expressions
5256 made during type construction. */
5257 if (this_offset == 0)
5260 offset = size_binop (PLUS_EXPR, offset, this_offset);
5261 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5262 DECL_FIELD_BIT_OFFSET (field));
5264 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5267 else if (TREE_CODE (exp) == ARRAY_REF
5268 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5270 tree index = TREE_OPERAND (exp, 1);
5271 tree low_bound = array_ref_low_bound (exp);
5272 tree unit_size = array_ref_element_size (exp);
5274 /* We assume all arrays have sizes that are a multiple of a byte.
5275 First subtract the lower bound, if any, in the type of the
5276 index, then convert to sizetype and multiply by the size of the element.  */
5278 if (! integer_zerop (low_bound))
5279 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5282 offset = size_binop (PLUS_EXPR, offset,
5283 size_binop (MULT_EXPR,
5284 convert (sizetype, index),
5288 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5289 conversions that don't change the mode, and all view conversions
5290 except those that need to "step up" the alignment. */
5291 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5292 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5293 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5294 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5296 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5297 < BIGGEST_ALIGNMENT)
5298 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5299 || TYPE_ALIGN_OK (TREE_TYPE
5300 (TREE_OPERAND (exp, 0))))))
5301 && ! ((TREE_CODE (exp) == NOP_EXPR
5302 || TREE_CODE (exp) == CONVERT_EXPR)
5303 && (TYPE_MODE (TREE_TYPE (exp))
5304 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5307 /* If any reference in the chain is volatile, the effect is volatile. */
5308 if (TREE_THIS_VOLATILE (exp))
5311 exp = TREE_OPERAND (exp, 0);
5314 /* If OFFSET is constant, see if we can return the whole thing as a
5315 constant bit position. Otherwise, split it up. */
5316 if (host_integerp (offset, 0)
5317 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5319 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5320 && host_integerp (tem, 0))
5321 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5323 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5329 /* Return a tree of sizetype representing the size, in bytes, of the element
5330 of EXP, an ARRAY_REF. */
5333 array_ref_element_size (tree exp)
5335 tree aligned_size = TREE_OPERAND (exp, 3);
5336 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5338 /* If a size was specified in the ARRAY_REF, it's the size measured
5339 in alignment units of the element type. So multiply by that value. */
5342 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5343 sizetype from another type of the same width and signedness. */
5344 if (TREE_TYPE (aligned_size) != sizetype)
5345 aligned_size = fold_convert (sizetype, aligned_size);
5346 return size_binop (MULT_EXPR, aligned_size,
5347 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5350 /* Otherwise, take the size from that of the element type. Substitute
5351 any PLACEHOLDER_EXPR that we have. */
5353 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5356 /* Return a tree representing the lower bound of the array mentioned in
5357 EXP, an ARRAY_REF. */
5360 array_ref_low_bound (tree exp)
5362 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5364 /* If a lower bound is specified in EXP, use it. */
5365 if (TREE_OPERAND (exp, 2))
5366 return TREE_OPERAND (exp, 2);
5368 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5369 substituting for a PLACEHOLDER_EXPR as needed. */
5370 if (domain_type && TYPE_MIN_VALUE (domain_type))
5371 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5373 /* Otherwise, return a zero of the appropriate type. */
5374 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5377 /* Return a tree representing the upper bound of the array mentioned in
5378 EXP, an ARRAY_REF. */
5381 array_ref_up_bound (tree exp)
5383 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5385 /* If there is a domain type and it has an upper bound, use it, substituting
5386 for a PLACEHOLDER_EXPR as needed. */
5387 if (domain_type && TYPE_MAX_VALUE (domain_type))
5388 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5390 /* Otherwise fail. */
5394 /* Return a tree representing the offset, in bytes, of the field referenced
5395 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5398 component_ref_field_offset (tree exp)
5400 tree aligned_offset = TREE_OPERAND (exp, 2);
5401 tree field = TREE_OPERAND (exp, 1);
5403 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5404 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that value.  */
5408 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5409 sizetype from another type of the same width and signedness. */
5410 if (TREE_TYPE (aligned_offset) != sizetype)
5411 aligned_offset = fold_convert (sizetype, aligned_offset);
5412 return size_binop (MULT_EXPR, aligned_offset,
5413 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5416 /* Otherwise, take the offset from that of the field. Substitute
5417 any PLACEHOLDER_EXPR that we have. */
5419 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5422 /* Return 1 if T is an expression that get_inner_reference handles. */
5425 handled_component_p (tree t)
5427 switch (TREE_CODE (t))
5432 case ARRAY_RANGE_REF:
5433 case NON_LVALUE_EXPR:
5434 case VIEW_CONVERT_EXPR:
5437 /* ??? Sure they are handled, but get_inner_reference may return
5438 a different PBITSIZE, depending upon whether the expression is
5439 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5442 return (TYPE_MODE (TREE_TYPE (t))
5443 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5450 /* Given an rtx VALUE that may contain additions and multiplications, return
5451 an equivalent value that just refers to a register, memory, or constant.
5452 This is done by generating instructions to perform the arithmetic and
5453 returning a pseudo-register containing the value.
5455 The returned value may be a REG, SUBREG, MEM or constant. */
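/* Editorial example: given the hypothetical rtx

     (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (const_int 8))

   force_operand emits the multiply and the add as real insns and
   returns a pseudo holding the sum, which is valid wherever a
   general operand is required.  */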
5458 force_operand (rtx value, rtx target)
5461 /* Use subtarget as the target for operand 0 of a binary operation. */
5462 rtx subtarget = get_subtarget (target);
5463 enum rtx_code code = GET_CODE (value);
5465 /* Check for subreg applied to an expression produced by the loop optimizer.  */
5467 && !REG_P (SUBREG_REG (value))
5468 && !MEM_P (SUBREG_REG (value)))
5470 value = simplify_gen_subreg (GET_MODE (value),
5471 force_reg (GET_MODE (SUBREG_REG (value)),
5472 force_operand (SUBREG_REG (value),
5474 GET_MODE (SUBREG_REG (value)),
5475 SUBREG_BYTE (value));
5476 code = GET_CODE (value);
5479 /* Check for a PIC address load. */
5480 if ((code == PLUS || code == MINUS)
5481 && XEXP (value, 0) == pic_offset_table_rtx
5482 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5483 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5484 || GET_CODE (XEXP (value, 1)) == CONST))
5487 subtarget = gen_reg_rtx (GET_MODE (value));
5488 emit_move_insn (subtarget, value);
5492 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5495 target = gen_reg_rtx (GET_MODE (value));
5496 convert_move (target, force_operand (XEXP (value, 0), NULL),
5497 code == ZERO_EXTEND);
5501 if (ARITHMETIC_P (value))
5503 op2 = XEXP (value, 1);
5504 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5506 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5509 op2 = negate_rtx (GET_MODE (value), op2);
5512 /* Check for an addition with OP2 a constant integer and our first
5513 operand a PLUS of a virtual register and something else. In that
5514 case, we want to emit the sum of the virtual register and the
5515 constant first and then add the other value. This allows virtual
5516 register instantiation to simply modify the constant rather than
5517 creating another one around this addition. */
5518 if (code == PLUS && GET_CODE (op2) == CONST_INT
5519 && GET_CODE (XEXP (value, 0)) == PLUS
5520 && REG_P (XEXP (XEXP (value, 0), 0))
5521 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5522 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5524 rtx temp = expand_simple_binop (GET_MODE (value), code,
5525 XEXP (XEXP (value, 0), 0), op2,
5526 subtarget, 0, OPTAB_LIB_WIDEN);
5527 return expand_simple_binop (GET_MODE (value), code, temp,
5528 force_operand (XEXP (XEXP (value,
5530 target, 0, OPTAB_LIB_WIDEN);
5533 op1 = force_operand (XEXP (value, 0), subtarget);
5534 op2 = force_operand (op2, NULL_RTX);
5538 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5540 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5541 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5542 target, 1, OPTAB_LIB_WIDEN);
5544 return expand_divmod (0,
5545 FLOAT_MODE_P (GET_MODE (value))
5546 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5547 GET_MODE (value), op1, op2, target, 0);
5550 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5554 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5558 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5562 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5563 target, 0, OPTAB_LIB_WIDEN);
5566 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5567 target, 1, OPTAB_LIB_WIDEN);
5570 if (UNARY_P (value))
5572 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5573 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5576 #ifdef INSN_SCHEDULING
5577 /* On machines that have insn scheduling, we want all memory references to be
5578 explicit, so we need to deal with such paradoxical SUBREGs. */
5579 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5580 && (GET_MODE_SIZE (GET_MODE (value))
5581 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5583 = simplify_gen_subreg (GET_MODE (value),
5584 force_reg (GET_MODE (SUBREG_REG (value)),
5585 force_operand (SUBREG_REG (value),
5587 GET_MODE (SUBREG_REG (value)),
5588 SUBREG_BYTE (value));
5594 /* Subroutine of expand_expr: return nonzero iff there is no way that
5595 EXP can reference X, which is being modified. TOP_P is nonzero if this
5596 call is going to be used to determine whether we need a temporary
5597 for EXP, as opposed to a recursive call to this function.
5599 It is always safe for this routine to return zero since it merely
5600 searches for optimization opportunities. */
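/* Editorial example: if X is the MEM for a local array and EXP is
   "b + c" where neither operand can alias that array, this returns
   1 and the caller may compute EXP directly into X.  Returning 0
   merely forces a temporary, so a false negative costs performance,
   never correctness.  */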
5603 safe_from_p (rtx x, tree exp, int top_p)
5609 /* If EXP has varying size, we MUST use a target since we currently
5610 have no way of allocating temporaries of variable size
5611 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5612 So we assume here that something at a higher level has prevented a
5613 clash. This is somewhat bogus, but the best we can do. Only
5614 do this when X is BLKmode and when we are at the top level. */
5615 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5616 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5617 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5618 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5619 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5621 && GET_MODE (x) == BLKmode)
5622 /* If X is in the outgoing argument area, it is always safe. */
5624 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5625 || (GET_CODE (XEXP (x, 0)) == PLUS
5626 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5629 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5630 find the underlying pseudo. */
5631 if (GET_CODE (x) == SUBREG)
5634 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5638 /* Now look at our tree code and possibly recurse. */
5639 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5641 case tcc_declaration:
5642 exp_rtl = DECL_RTL_IF_SET (exp);
5648 case tcc_exceptional:
5649 if (TREE_CODE (exp) == TREE_LIST)
5653 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5655 exp = TREE_CHAIN (exp);
5658 if (TREE_CODE (exp) != TREE_LIST)
5659 return safe_from_p (x, exp, 0);
5662 else if (TREE_CODE (exp) == ERROR_MARK)
5663 return 1; /* An already-visited SAVE_EXPR? */
5668 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR.  */
5670 return (TREE_CODE (exp) != DECL_EXPR
5671 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5672 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5673 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5676 case tcc_comparison:
5677 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5682 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5684 case tcc_expression:
5686 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5687 the expression. If it is set, we conflict iff we are that rtx or
5688 both are in memory. Otherwise, we check all operands of the
5689 expression recursively. */
5691 switch (TREE_CODE (exp))
5694 /* If the operand is static or we are static, we can't conflict.
5695 Likewise if we don't conflict with the operand at all. */
5696 if (staticp (TREE_OPERAND (exp, 0))
5697 || TREE_STATIC (exp)
5698 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5701 /* Otherwise, the only way this can conflict is if we are taking
5702 the address of a DECL whose address is part of X, which is very rare.  */
5704 exp = TREE_OPERAND (exp, 0);
5707 if (!DECL_RTL_SET_P (exp)
5708 || !MEM_P (DECL_RTL (exp)))
5711 exp_rtl = XEXP (DECL_RTL (exp), 0);
5715 case MISALIGNED_INDIRECT_REF:
5716 case ALIGN_INDIRECT_REF:
5719 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5720 get_alias_set (exp)))
5725 /* Assume that the call will clobber all hard registers and all of memory.  */
5727 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5732 case WITH_CLEANUP_EXPR:
5733 case CLEANUP_POINT_EXPR:
5734 /* Lowered by gimplify.c. */
5738 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5744 /* If we have an rtx, we do not need to scan our operands. */
5748 nops = first_rtl_op (TREE_CODE (exp));
5749 for (i = 0; i < nops; i++)
5750 if (TREE_OPERAND (exp, i) != 0
5751 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5754 /* If this is a language-specific tree code, it may require
5755 special handling. */
5756 if ((unsigned int) TREE_CODE (exp)
5757 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5758 && !lang_hooks.safe_from_p (x, exp))
5763 /* Should never get a type here. */
5767 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
5771 if (GET_CODE (exp_rtl) == SUBREG)
5773 exp_rtl = SUBREG_REG (exp_rtl);
5775 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5779 /* If the rtl is X, then it is not safe.  Otherwise, it is safe unless both
5780 are memory and they conflict. */
5781 return ! (rtx_equal_p (x, exp_rtl)
5782 || (MEM_P (x) && MEM_P (exp_rtl)
5783 && true_dependence (exp_rtl, VOIDmode, x,
5784 rtx_addr_varies_p)));
5787 /* If we reach here, it is safe. */
5792 /* Return the highest power of two that EXP is known to be a multiple of.
5793 This is used in updating alignment of MEMs in array references. */
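/* Editorial example: for the offset expression i * 12 + 4, the
   MULT case multiplies its operands' factors (12 contributes 4, a
   bare variable contributes the default of 1), the constant 4
   contributes 4, and the PLUS case takes the MIN of the two, so
   the result is 4 and a MEM built from this address may be marked
   as 4-byte aligned.  */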
5795 static unsigned HOST_WIDE_INT
5796 highest_pow2_factor (tree exp)
5798 unsigned HOST_WIDE_INT c0, c1;
5800 switch (TREE_CODE (exp))
5803 /* We can find the lowest bit that's a one. If the low
5804 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5805 We need to handle this case since we can find it in a COND_EXPR,
5806 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5807 erroneous program, so return BIGGEST_ALIGNMENT to avoid any later ICE.  */
5809 if (TREE_CONSTANT_OVERFLOW (exp))
5810 return BIGGEST_ALIGNMENT;
5813 /* Note: tree_low_cst is intentionally not used here;
5814 we don't care about the upper bits. */
5815 c0 = TREE_INT_CST_LOW (exp);
5817 return c0 ? c0 : BIGGEST_ALIGNMENT;
5821 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5822 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5823 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5824 return MIN (c0, c1);
5827 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5828 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5831 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5833 if (integer_pow2p (TREE_OPERAND (exp, 1))
5834 && host_integerp (TREE_OPERAND (exp, 1), 1))
5836 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5837 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5838 return MAX (1, c0 / c1);
5842 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5844 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5847 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5850 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5851 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5852 return MIN (c0, c1);
5861 /* Similar, except that the alignment requirements of TARGET are
5862 taken into account. Assume it is at least as aligned as its
5863 type, unless it is a COMPONENT_REF in which case the layout of
5864 the structure gives the alignment. */
5866 static unsigned HOST_WIDE_INT
5867 highest_pow2_factor_for_target (tree target, tree exp)
5869 unsigned HOST_WIDE_INT target_align, factor;
5871 factor = highest_pow2_factor (exp);
5872 if (TREE_CODE (target) == COMPONENT_REF)
5873 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5875 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5876 return MAX (factor, target_align);
5879 /* Expands variable VAR. */
5882 expand_var (tree var)
5884 if (DECL_EXTERNAL (var))
5887 if (TREE_STATIC (var))
5888 /* If this is an inlined copy of a static local variable,
5889 look up the original decl. */
5890 var = DECL_ORIGIN (var);
5892 if (TREE_STATIC (var)
5893 ? !TREE_ASM_WRITTEN (var)
5894 : !DECL_RTL_SET_P (var))
5896 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5897 /* Should be ignored. */;
5898 else if (lang_hooks.expand_decl (var))
5900 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5902 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5903 rest_of_decl_compilation (var, 0, 0);
5905 /* No expansion needed. */
5906 gcc_assert (TREE_CODE (var) == TYPE_DECL
5907 || TREE_CODE (var) == CONST_DECL
5908 || TREE_CODE (var) == FUNCTION_DECL
5909 || TREE_CODE (var) == LABEL_DECL);
5913 /* Subroutine of expand_expr. Expand the two operands of a binary
5914 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5915 The value may be stored in TARGET if TARGET is nonzero. The
5916 MODIFIER argument is as documented by expand_expr. */
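/* Editorial note: for an expression such as x * x the two operands
   compare equal, so the code below expands x once and hands back a
   copy_rtx of the result for the second operand instead of
   expanding it twice.  */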
5919 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5920 enum expand_modifier modifier)
5922 if (! safe_from_p (target, exp1, 1))
5924 if (operand_equal_p (exp0, exp1, 0))
5926 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5927 *op1 = copy_rtx (*op0);
5931 /* If we need to preserve evaluation order, copy exp0 into its own
5932 temporary variable so that it can't be clobbered by exp1. */
5933 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5934 exp0 = save_expr (exp0);
5935 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5936 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
5941 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
5942 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
5945 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
5946 enum expand_modifier modifier)
5948 rtx result, subtarget;
5950 HOST_WIDE_INT bitsize, bitpos;
5951 int volatilep, unsignedp;
5952 enum machine_mode mode1;
5954 /* If we are taking the address of a constant and are at the top level,
5955 we have to use output_constant_def since we can't call force_const_mem yet.  */
5957 /* ??? This should be considered a front-end bug. We should not be
5958 generating ADDR_EXPR of something that isn't an LVALUE. The only
5959 exception here is STRING_CST. */
5960 if (TREE_CODE (exp) == CONSTRUCTOR
5961 || CONSTANT_CLASS_P (exp))
5962 return XEXP (output_constant_def (exp, 0), 0);
5964 /* Everything must be something allowed by is_gimple_addressable. */
5965 switch (TREE_CODE (exp))
5968 /* This case will happen via recursion for &a->b. */
5969 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
5972 /* Recurse and make the output_constant_def clause above handle this. */
5973 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
5977 /* The real part of the complex number is always first, therefore
5978 the address is the same as the address of the parent object. */
5981 inner = TREE_OPERAND (exp, 0);
5985 /* The imaginary part of the complex number is always second.
5986 The expression is therefore always offset by the size of the scalar type.  */
5989 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
5990 inner = TREE_OPERAND (exp, 0);
5994 /* If the object is a DECL, then expand it for its rtl. Don't bypass
5995 expand_expr, as that can have various side effects; LABEL_DECLs for
5996 example, may not have their DECL_RTL set yet. Assume language
5997 specific tree nodes can be expanded in some interesting way. */
5999 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6001 result = expand_expr (exp, target, tmode,
6002 modifier == EXPAND_INITIALIZER
6003 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6005 /* If the DECL isn't in memory, then the DECL wasn't properly
6006 marked TREE_ADDRESSABLE, which will be either a front-end
6007 or a tree optimizer bug. */
6008 gcc_assert (MEM_P (result));
6009 result = XEXP (result, 0);
6011 /* ??? Is this needed anymore? */
6012 if (DECL_P (exp) && TREE_USED (exp) == 0)
6014 assemble_external (exp);
6015 TREE_USED (exp) = 1;
6018 if (modifier != EXPAND_INITIALIZER
6019 && modifier != EXPAND_CONST_ADDRESS)
6020 result = force_operand (result, target);
6024 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6025 &mode1, &unsignedp, &volatilep);
6029 /* We must have made progress. */
6030 gcc_assert (inner != exp);
6032 subtarget = offset || bitpos ? NULL_RTX : target;
6033 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6039 if (modifier != EXPAND_NORMAL)
6040 result = force_operand (result, NULL);
6041 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6043 result = convert_memory_address (tmode, result);
6044 tmp = convert_memory_address (tmode, tmp);
6046 if (modifier == EXPAND_SUM)
6047 result = gen_rtx_PLUS (tmode, result, tmp);
6050 subtarget = bitpos ? NULL_RTX : target;
6051 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6052 1, OPTAB_LIB_WIDEN);
6058 /* Someone beforehand should have rejected taking the address
6059 of such an object. */
6060 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6062 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6063 if (modifier < EXPAND_SUM)
6064 result = force_operand (result, target);
6070 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6071 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6074 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6075 enum expand_modifier modifier)
6077 enum machine_mode rmode;
6080 /* Target mode of VOIDmode says "whatever's natural". */
6081 if (tmode == VOIDmode)
6082 tmode = TYPE_MODE (TREE_TYPE (exp));
6084 /* We can get called with some Weird Things if the user does silliness
6085 like "(short) &a". In that case, convert_memory_address won't do
6086 the right thing, so ignore the given target mode. */
6087 if (tmode != Pmode && tmode != ptr_mode)
6090 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6093 /* Despite expand_expr's claims about ignoring TMODE when not
6094 strictly convenient, things break if we don't honor it.  Note
6095 that combined with the above, we only do this for pointer modes. */
6096 rmode = GET_MODE (result);
6097 if (rmode == VOIDmode)
6100 result = convert_memory_address (tmode, result);
6106 /* expand_expr: generate code for computing expression EXP.
6107 An rtx for the computed value is returned. The value is never null.
6108 In the case of a void EXP, const0_rtx is returned.
6110 The value may be stored in TARGET if TARGET is nonzero.
6111 TARGET is just a suggestion; callers must assume that
6112 the rtx returned may not be the same as TARGET.
6114 If TARGET is CONST0_RTX, it means that the value will be ignored.
6116 If TMODE is not VOIDmode, it suggests generating the
6117 result in mode TMODE. But this is done only when convenient.
6118 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6119 TMODE is just a suggestion; callers must assume that
6120 the rtx returned may not have mode TMODE.
6122 Note that TARGET may have neither TMODE nor MODE. In that case, it
6123 probably will not be used.
6125 If MODIFIER is EXPAND_SUM then when EXP is an addition
6126 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6127 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6128 products as above, or REG or MEM, or constant.
6129 Ordinarily in such cases we would output mul or add instructions
6130 and then return a pseudo reg containing the sum.
6132 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6133 it also marks a label as absolutely required (it can't be dead).
6134 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6135 This is used for outputting expressions used in initializers.
6137 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6138 with a constant address even if that address is not normally legitimate.
6139 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6141 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6142 a call parameter. Such targets require special care as we haven't yet
6143 marked TARGET so that it's safe from being trashed by libcalls. We
6144 don't want to use TARGET for anything but the final result;
6145 intermediate values must go elsewhere.  Additionally, calls to
6146 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6148 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6149 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6150 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6151 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on recursively.  */
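/* Editorial example: expanding the address &a[i] under EXPAND_SUM
   may come back as (plus (reg) (mult (reg) (const_int 4))) rather
   than as a pseudo holding the finished sum, letting the caller
   fold the whole expression into a single addressing mode where
   the target allows it.  */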
6154 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6155 enum expand_modifier, rtx *);
6158 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6159 enum expand_modifier modifier, rtx *alt_rtl)
6162 rtx ret, last = NULL;
6164 /* Handle ERROR_MARK before anybody tries to access its type. */
6165 if (TREE_CODE (exp) == ERROR_MARK
6166 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6168 ret = CONST0_RTX (tmode);
6169 return ret ? ret : const0_rtx;
6172 if (flag_non_call_exceptions)
6174 rn = lookup_stmt_eh_region (exp);
6175       /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
6176       if (rn >= 0)
6177 	last = get_last_insn ();
6180 /* If this is an expression of some kind and it has an associated line
6181 number, then emit the line number before expanding the expression.
6183 We need to save and restore the file and line information so that
6184 errors discovered during expansion are emitted with the right
6185      information.  It would be better if the diagnostic routines
6186      used the file/line information embedded in the tree nodes rather
6187      than globals.  */
6188 if (cfun && EXPR_HAS_LOCATION (exp))
6190 location_t saved_location = input_location;
6191 input_location = EXPR_LOCATION (exp);
6192 emit_line_note (input_location);
6194 /* Record where the insns produced belong. */
6195 record_block_change (TREE_BLOCK (exp));
6197 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6199 input_location = saved_location;
6203 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6206 /* If using non-call exceptions, mark all insns that may trap.
6207 expand_call() will mark CALL_INSNs before we get to this code,
6208 but it doesn't handle libcalls, and these may trap. */
6212 for (insn = next_real_insn (last); insn;
6213 insn = next_real_insn (insn))
6215 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6216 /* If we want exceptions for non-call insns, any
6217 may_trap_p instruction may throw. */
6218 && GET_CODE (PATTERN (insn)) != CLOBBER
6219 && GET_CODE (PATTERN (insn)) != USE
6220 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6222 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6232 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6233 enum expand_modifier modifier, rtx *alt_rtl)
6236 tree type = TREE_TYPE (exp);
6238 enum machine_mode mode;
6239 enum tree_code code = TREE_CODE (exp);
6241 rtx subtarget, original_target;
6244 bool reduce_bit_field = false;
6245 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
6246 				 ? reduce_to_bit_field_precision ((expr), \
6247 								  target, type) \
6248 				 : (expr))
6251 mode = TYPE_MODE (type);
6252 unsignedp = TYPE_UNSIGNED (type);
6253 if (lang_hooks.reduce_bit_field_operations
6254 && TREE_CODE (type) == INTEGER_TYPE
6255 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6257 /* An operation in what may be a bit-field type needs the
6258 result to be reduced to the precision of the bit-field type,
6259 which is narrower than that of the type's mode. */
6260 reduce_bit_field = true;
6261 if (modifier == EXPAND_STACK_PARM)
6265 /* Use subtarget as the target for operand 0 of a binary operation. */
6266 subtarget = get_subtarget (target);
6267 original_target = target;
6268 ignore = (target == const0_rtx
6269 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6270 || code == CONVERT_EXPR || code == COND_EXPR
6271 || code == VIEW_CONVERT_EXPR)
6272 && TREE_CODE (type) == VOID_TYPE));
6274 /* If we are going to ignore this result, we need only do something
6275 if there is a side-effect somewhere in the expression. If there
6276 is, short-circuit the most common cases here. Note that we must
6277 not call expand_expr with anything but const0_rtx in case this
6278 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6282 if (! TREE_SIDE_EFFECTS (exp))
6285 /* Ensure we reference a volatile object even if value is ignored, but
6286 don't do this if all we are doing is taking its address. */
6287 if (TREE_THIS_VOLATILE (exp)
6288 && TREE_CODE (exp) != FUNCTION_DECL
6289 && mode != VOIDmode && mode != BLKmode
6290 && modifier != EXPAND_CONST_ADDRESS)
6292 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6294 temp = copy_to_reg (temp);
6298 if (TREE_CODE_CLASS (code) == tcc_unary
6299 || code == COMPONENT_REF || code == INDIRECT_REF)
6300 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6303 else if (TREE_CODE_CLASS (code) == tcc_binary
6304 || TREE_CODE_CLASS (code) == tcc_comparison
6305 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6307 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6308 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6311 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6312 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6313 	/* If the second operand has no side effects, just evaluate
6314 	   the first.  */
6315 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6317 else if (code == BIT_FIELD_REF)
6319 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6320 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6321 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6328   /* If we will do cse later, generate all results into pseudo registers,
6329      since 1) that allows cse to find more things
6330      and 2) otherwise cse could produce an insn the machine
6331      cannot support.  An exception is a CONSTRUCTOR into a multi-word
6332      MEM: storing directly into the MEM is much more likely to be efficient.
6333 Another is a CALL_EXPR which must return in memory. */
6335 if (! cse_not_expected && mode != BLKmode && target
6336 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6337 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6338 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6345 tree function = decl_function_context (exp);
6347 temp = label_rtx (exp);
6348 temp = gen_rtx_LABEL_REF (Pmode, temp);
6350 if (function != current_function_decl
6352 LABEL_REF_NONLOCAL_P (temp) = 1;
6354 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6359 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6364 /* If a static var's type was incomplete when the decl was written,
6365 but the type is complete now, lay out the decl now. */
6366 if (DECL_SIZE (exp) == 0
6367 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6368 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6369 layout_decl (exp, 0);
6371 /* ... fall through ... */
6375 gcc_assert (DECL_RTL (exp));
6377       /* Ensure the variable is marked as used even if it doesn't go through
6378 	 a parser.  If it hasn't been used yet, write out an external
6379 	 definition.  */
6380 if (! TREE_USED (exp))
6382 assemble_external (exp);
6383 TREE_USED (exp) = 1;
6386 /* Show we haven't gotten RTL for this yet. */
6389 /* Variables inherited from containing functions should have
6390 been lowered by this point. */
6391 context = decl_function_context (exp);
6392 gcc_assert (!context
6393 || context == current_function_decl
6394 || TREE_STATIC (exp)
6395 /* ??? C++ creates functions that are not TREE_STATIC. */
6396 || TREE_CODE (exp) == FUNCTION_DECL);
6398 /* This is the case of an array whose size is to be determined
6399 	 from its initializer, while the initializer is still being parsed.
6400 	 See expand_decl.  */
6402 if (MEM_P (DECL_RTL (exp))
6403 && REG_P (XEXP (DECL_RTL (exp), 0)))
6404 temp = validize_mem (DECL_RTL (exp));
6406 /* If DECL_RTL is memory, we are in the normal case and either
6407 the address is not valid or it is not a register and -fforce-addr
6408 is specified, get the address into a register. */
6410 else if (MEM_P (DECL_RTL (exp))
6411 && modifier != EXPAND_CONST_ADDRESS
6412 && modifier != EXPAND_SUM
6413 && modifier != EXPAND_INITIALIZER
6414 && (! memory_address_p (DECL_MODE (exp),
6415 XEXP (DECL_RTL (exp), 0))
6417 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6420 *alt_rtl = DECL_RTL (exp);
6421 temp = replace_equiv_address (DECL_RTL (exp),
6422 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6425 /* If we got something, return it. But first, set the alignment
6426 if the address is a register. */
6429 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6430 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6435 /* If the mode of DECL_RTL does not match that of the decl, it
6436 must be a promoted value. We return a SUBREG of the wanted mode,
6437 but mark it so that we know that it was already extended. */
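/* Example (illustrative): on a target that promotes subword variables
   to full registers, a QImode 'char' variable may live in an SImode
   register.  The SUBREG built below, with SUBREG_PROMOTED_VAR_P set,
   records that the wide register already holds a correctly extended
   value, so later consumers can skip a redundant extension.  */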
6439 if (REG_P (DECL_RTL (exp))
6440 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6442 enum machine_mode pmode;
6444 /* Get the signedness used for this variable. Ensure we get the
6445 same mode we got when the variable was declared. */
6446 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6447 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6448 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6450 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6451 SUBREG_PROMOTED_VAR_P (temp) = 1;
6452 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6456 return DECL_RTL (exp);
6459 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6460 TREE_INT_CST_HIGH (exp), mode);
6462 /* ??? If overflow is set, fold will have done an incomplete job,
6463 which can result in (plus xx (const_int 0)), which can get
6464 simplified by validate_replace_rtx during virtual register
6465 instantiation, which can result in unrecognizable insns.
6466 Avoid this by forcing all overflows into registers. */
6467 if (TREE_CONSTANT_OVERFLOW (exp)
6468 && modifier != EXPAND_INITIALIZER)
6469 temp = force_reg (mode, temp);
6474 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6475 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6476 return const_vector_from_tree (exp);
6478 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6479 TREE_VECTOR_CST_ELTS (exp)),
6480 ignore ? const0_rtx : target, tmode, modifier);
6483 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6486 /* If optimized, generate immediate CONST_DOUBLE
6487 which will be turned into memory by reload if necessary.
6489 We used to force a register so that loop.c could see it. But
6490 this does not allow gen_* patterns to perform optimizations with
6491 the constants. It also produces two insns in cases like "x = 1.0;".
6492 On most machines, floating-point constants are not permitted in
6493 many insns, so we'd end up copying it to a register in any case.
6495 Now, we do the copying in expand_binop, if appropriate. */
6496 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6497 TYPE_MODE (TREE_TYPE (exp)));
6500 /* Handle evaluating a complex constant in a CONCAT target. */
6501 if (original_target && GET_CODE (original_target) == CONCAT)
6503 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6506 rtarg = XEXP (original_target, 0);
6507 itarg = XEXP (original_target, 1);
6509 /* Move the real and imaginary parts separately. */
6510 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6511 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6514 emit_move_insn (rtarg, op0);
6516 emit_move_insn (itarg, op1);
6518 return original_target;
6521 /* ... fall through ... */
6524 temp = output_constant_def (exp, 1);
6526 /* temp contains a constant address.
6527 On RISC machines where a constant address isn't valid,
6528 make some insns to get that address into a register. */
6529 if (modifier != EXPAND_CONST_ADDRESS
6530 && modifier != EXPAND_INITIALIZER
6531 && modifier != EXPAND_SUM
6532 && (! memory_address_p (mode, XEXP (temp, 0))
6533 || flag_force_addr))
6534 return replace_equiv_address (temp,
6535 copy_rtx (XEXP (temp, 0)));
6540 tree val = TREE_OPERAND (exp, 0);
6541 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6543 if (!SAVE_EXPR_RESOLVED_P (exp))
6545 /* We can indeed still hit this case, typically via builtin
6546 expanders calling save_expr immediately before expanding
6547 something. Assume this means that we only have to deal
6548 with non-BLKmode values. */
6549 gcc_assert (GET_MODE (ret) != BLKmode);
6551 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6552 DECL_ARTIFICIAL (val) = 1;
6553 DECL_IGNORED_P (val) = 1;
6554 TREE_OPERAND (exp, 0) = val;
6555 SAVE_EXPR_RESOLVED_P (exp) = 1;
6557 if (!CONSTANT_P (ret))
6558 ret = copy_to_reg (ret);
6559 SET_DECL_RTL (val, ret);
6566 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6567 expand_goto (TREE_OPERAND (exp, 0));
6569 expand_computed_goto (TREE_OPERAND (exp, 0));
6573       /* If we don't need the result, just ensure we evaluate any
6574 	 subexpressions.  */
6579 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6580 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6585 /* All elts simple constants => refer to a constant in memory. But
6586 if this is a non-BLKmode mode, let it store a field at a time
6587 since that should make a CONST_INT or CONST_DOUBLE when we
6588 fold. Likewise, if we have a target we can use, it is best to
6589 store directly into the target unless the type is large enough
6590 that memcpy will be used. If we are making an initializer and
6591 all operands are constant, put it in memory as well.
6593 	 FIXME: Avoid trying to fill vector constructors piecemeal.
6594 Output them with output_constant_def below unless we're sure
6595 they're zeros. This should go away when vector initializers
6596 are treated like VECTOR_CST instead of arrays.
6598 else if ((TREE_STATIC (exp)
6599 && ((mode == BLKmode
6600 && ! (target != 0 && safe_from_p (target, exp, 1)))
6601 || TREE_ADDRESSABLE (exp)
6602 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6603 && (! MOVE_BY_PIECES_P
6604 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6606 && ! mostly_zeros_p (exp))))
6607 || ((modifier == EXPAND_INITIALIZER
6608 || modifier == EXPAND_CONST_ADDRESS)
6609 && TREE_CONSTANT (exp)))
6611 rtx constructor = output_constant_def (exp, 1);
6613 if (modifier != EXPAND_CONST_ADDRESS
6614 && modifier != EXPAND_INITIALIZER
6615 && modifier != EXPAND_SUM)
6616 constructor = validize_mem (constructor);
6622 /* Handle calls that pass values in multiple non-contiguous
6623 locations. The Irix 6 ABI has examples of this. */
6624 if (target == 0 || ! safe_from_p (target, exp, 1)
6625 || GET_CODE (target) == PARALLEL
6626 || modifier == EXPAND_STACK_PARM)
6628 = assign_temp (build_qualified_type (type,
6630 | (TREE_READONLY (exp)
6631 * TYPE_QUAL_CONST))),
6632 0, TREE_ADDRESSABLE (exp), 1);
6634 store_constructor (exp, target, 0, int_expr_size (exp));
6638 case MISALIGNED_INDIRECT_REF:
6639 case ALIGN_INDIRECT_REF:
6642 tree exp1 = TREE_OPERAND (exp, 0);
6645 if (code == MISALIGNED_INDIRECT_REF
6646 && !targetm.vectorize.misaligned_mem_ok (mode))
6649 if (modifier != EXPAND_WRITE)
6653 t = fold_read_from_constant_string (exp);
6655 return expand_expr (t, target, tmode, modifier);
6658 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6659 op0 = memory_address (mode, op0);
6661 if (code == ALIGN_INDIRECT_REF)
6663 int align = TYPE_ALIGN_UNIT (type);
6664 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6665 op0 = memory_address (mode, op0);
6668 temp = gen_rtx_MEM (mode, op0);
6670 orig = REF_ORIGINAL (exp);
6673 set_mem_attributes (temp, orig, 0);
6681 tree array = TREE_OPERAND (exp, 0);
6682 tree low_bound = array_ref_low_bound (exp);
6683 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6686 gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
6688 /* Optimize the special-case of a zero lower bound.
6690 We convert the low_bound to sizetype to avoid some problems
6691 with constant folding. (E.g. suppose the lower bound is 1,
6692 and its mode is QI. Without the conversion, (ARRAY
6693 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6694 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6696 if (! integer_zerop (low_bound))
6697 index = size_diffop (index, convert (sizetype, low_bound));
6699 /* Fold an expression like: "foo"[2].
6700 This is not done in fold so it won't happen inside &.
6701 Don't fold if this is for wide characters since it's too
6702 difficult to do correctly and this is a very rare case. */
6704 if (modifier != EXPAND_CONST_ADDRESS
6705 && modifier != EXPAND_INITIALIZER
6706 && modifier != EXPAND_MEMORY)
6708 tree t = fold_read_from_constant_string (exp);
6711 return expand_expr (t, target, tmode, modifier);
6714 /* If this is a constant index into a constant array,
6715 just get the value from the array. Handle both the cases when
6716 we have an explicit constructor and when our operand is a variable
6717 that was declared const. */
6719 if (modifier != EXPAND_CONST_ADDRESS
6720 && modifier != EXPAND_INITIALIZER
6721 && modifier != EXPAND_MEMORY
6722 && TREE_CODE (array) == CONSTRUCTOR
6723 && ! TREE_SIDE_EFFECTS (array)
6724 && TREE_CODE (index) == INTEGER_CST
6725 && 0 > compare_tree_int (index,
6726 list_length (CONSTRUCTOR_ELTS
6727 (TREE_OPERAND (exp, 0)))))
6731 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6732 i = TREE_INT_CST_LOW (index);
6733 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6737 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6741 else if (optimize >= 1
6742 && modifier != EXPAND_CONST_ADDRESS
6743 && modifier != EXPAND_INITIALIZER
6744 && modifier != EXPAND_MEMORY
6745 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6746 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6747 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6748 && targetm.binds_local_p (array))
6750 if (TREE_CODE (index) == INTEGER_CST)
6752 tree init = DECL_INITIAL (array);
6754 if (TREE_CODE (init) == CONSTRUCTOR)
6758 for (elem = CONSTRUCTOR_ELTS (init);
6760 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6761 elem = TREE_CHAIN (elem))
6764 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6765 return expand_expr (fold (TREE_VALUE (elem)), target,
6768 else if (TREE_CODE (init) == STRING_CST
6769 && 0 > compare_tree_int (index,
6770 TREE_STRING_LENGTH (init)))
6772 tree type = TREE_TYPE (TREE_TYPE (init));
6773 enum machine_mode mode = TYPE_MODE (type);
6775 if (GET_MODE_CLASS (mode) == MODE_INT
6776 && GET_MODE_SIZE (mode) == 1)
6777 return gen_int_mode (TREE_STRING_POINTER (init)
6778 [TREE_INT_CST_LOW (index)], mode);
6783 goto normal_inner_ref;
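/* As an illustration of the read-only string case above (the names
   here are hypothetical), given

     static const char msg[] = "hi";
     ... msg[1] ...

   INIT is the STRING_CST "hi", INDEX is the INTEGER_CST 1 and is below
   TREE_STRING_LENGTH, so the reference expands via gen_int_mode
   directly to the QImode constant 'i', with no load emitted.  */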
6786 /* If the operand is a CONSTRUCTOR, we can just extract the
6787 appropriate field if it is present. */
6788 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6792 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6793 elt = TREE_CHAIN (elt))
6794 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6795 /* We can normally use the value of the field in the
6796 CONSTRUCTOR. However, if this is a bitfield in
6797 an integral mode that we can fit in a HOST_WIDE_INT,
6798 we must mask only the number of bits in the bitfield,
6799 since this is done implicitly by the constructor. If
6800 the bitfield does not meet either of those conditions,
6801 we can't do this optimization. */
6802 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6803 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6805 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6806 <= HOST_BITS_PER_WIDE_INT))))
6808 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6809 && modifier == EXPAND_STACK_PARM)
6811 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6812 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6814 HOST_WIDE_INT bitsize
6815 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6816 enum machine_mode imode
6817 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6819 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6821 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6822 op0 = expand_and (imode, op0, op1, target);
6827 = build_int_cst (NULL_TREE,
6828 GET_MODE_BITSIZE (imode) - bitsize);
6830 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6832 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6840 goto normal_inner_ref;
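/* Sketch of the bit-field cleanup performed above (a hypothetical
   3-bit field as the example): an unsigned field stored by the
   CONSTRUCTOR as a full-word value is masked with

     op1 = GEN_INT (((HOST_WIDE_INT) 1 << 3) - 1);
     op0 = expand_and (imode, op0, op1, target);

   while a signed field is shifted left and then arithmetically right
   by GET_MODE_BITSIZE (imode) - 3 so the sign bit is replicated.  */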
6843 case ARRAY_RANGE_REF:
6846 enum machine_mode mode1;
6847 HOST_WIDE_INT bitsize, bitpos;
6850 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6851 &mode1, &unsignedp, &volatilep);
6854 /* If we got back the original object, something is wrong. Perhaps
6855 we are evaluating an expression too early. In any event, don't
6856 infinitely recurse. */
6857 gcc_assert (tem != exp);
6859 /* If TEM's type is a union of variable size, pass TARGET to the inner
6860 	 computation, since it will need a temporary and TARGET is known
6861 	 to suffice for that.  This occurs in unchecked conversion in Ada.  */
6865 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6866 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6868 && modifier != EXPAND_STACK_PARM
6869 ? target : NULL_RTX),
6871 (modifier == EXPAND_INITIALIZER
6872 || modifier == EXPAND_CONST_ADDRESS
6873 || modifier == EXPAND_STACK_PARM)
6874 ? modifier : EXPAND_NORMAL);
6876       /* If this is a constant, put it into a register if it is a
6877 	 legitimate constant and OFFSET is 0; put it in memory if it isn't.  */
6878 if (CONSTANT_P (op0))
6880 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6881 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6883 op0 = force_reg (mode, op0);
6885 op0 = validize_mem (force_const_mem (mode, op0));
6888       /* Otherwise, if this object is not in memory and we either have an
6889 offset or a BLKmode result, put it there. This case can't occur in
6890 C, but can in Ada if we have unchecked conversion of an expression
6891 from a scalar type to an array or record type or for an
6892 ARRAY_RANGE_REF whose type is BLKmode. */
6893 else if (!MEM_P (op0)
6895 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6897 tree nt = build_qualified_type (TREE_TYPE (tem),
6898 (TYPE_QUALS (TREE_TYPE (tem))
6899 | TYPE_QUAL_CONST));
6900 rtx memloc = assign_temp (nt, 1, 1, 1);
6902 emit_move_insn (memloc, op0);
6908 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6911 gcc_assert (MEM_P (op0));
6913 #ifdef POINTERS_EXTEND_UNSIGNED
6914 if (GET_MODE (offset_rtx) != Pmode)
6915 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6917 if (GET_MODE (offset_rtx) != ptr_mode)
6918 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6921 if (GET_MODE (op0) == BLKmode
6922 	     /* A constant address in OP0 can have VOIDmode; we must
6923 not try to call force_reg in that case. */
6924 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6926 && (bitpos % bitsize) == 0
6927 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6928 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6930 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6934 op0 = offset_address (op0, offset_rtx,
6935 highest_pow2_factor (offset));
6938 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6939 record its alignment as BIGGEST_ALIGNMENT. */
6940 if (MEM_P (op0) && bitpos == 0 && offset != 0
6941 && is_aligning_offset (offset, tem))
6942 set_mem_align (op0, BIGGEST_ALIGNMENT);
6944 /* Don't forget about volatility even if this is a bitfield. */
6945 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6947 if (op0 == orig_op0)
6948 op0 = copy_rtx (op0);
6950 MEM_VOLATILE_P (op0) = 1;
6953 /* The following code doesn't handle CONCAT.
6954 Assume only bitpos == 0 can be used for CONCAT, due to
6955 	 one-element arrays having the same mode as their element.  */
6956 if (GET_CODE (op0) == CONCAT)
6958 gcc_assert (bitpos == 0
6959 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
6963 /* In cases where an aligned union has an unaligned object
6964 as a field, we might be extracting a BLKmode value from
6965 an integer-mode (e.g., SImode) object. Handle this case
6966 by doing the extract into an object as wide as the field
6967 (which we know to be the width of a basic mode), then
6968 storing into memory, and changing the mode to BLKmode. */
6969 if (mode1 == VOIDmode
6970 || REG_P (op0) || GET_CODE (op0) == SUBREG
6971 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6972 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6973 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6974 && modifier != EXPAND_CONST_ADDRESS
6975 && modifier != EXPAND_INITIALIZER)
6976 /* If the field isn't aligned enough to fetch as a memref,
6977 fetch it as a bit field. */
6978 || (mode1 != BLKmode
6979 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6980 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6982 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6983 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6984 && ((modifier == EXPAND_CONST_ADDRESS
6985 || modifier == EXPAND_INITIALIZER)
6987 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6988 || (bitpos % BITS_PER_UNIT != 0)))
6989 /* If the type and the field are a constant size and the
6990 size of the type isn't the same size as the bitfield,
6991 we must use bitfield operations. */
6993 && TYPE_SIZE (TREE_TYPE (exp))
6994 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6995 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6998 enum machine_mode ext_mode = mode;
7000 if (ext_mode == BLKmode
7001 && ! (target != 0 && MEM_P (op0)
7003 && bitpos % BITS_PER_UNIT == 0))
7004 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7006 if (ext_mode == BLKmode)
7009 target = assign_temp (type, 0, 1, 1);
7014 /* In this case, BITPOS must start at a byte boundary and
7015 TARGET, if specified, must be a MEM. */
7016 gcc_assert (MEM_P (op0)
7017 && (!target || MEM_P (target))
7018 && !(bitpos % BITS_PER_UNIT));
7020 emit_block_move (target,
7021 adjust_address (op0, VOIDmode,
7022 bitpos / BITS_PER_UNIT),
7023 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7025 (modifier == EXPAND_STACK_PARM
7026 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7031 op0 = validize_mem (op0);
7033 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7034 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7036 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7037 (modifier == EXPAND_STACK_PARM
7038 ? NULL_RTX : target),
7039 ext_mode, ext_mode);
7041 /* If the result is a record type and BITSIZE is narrower than
7042 the mode of OP0, an integral mode, and this is a big endian
7043 machine, we must put the field into the high-order bits. */
7044 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7045 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7046 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7047 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7048 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7052 /* If the result type is BLKmode, store the data into a temporary
7053 of the appropriate type, but with the mode corresponding to the
7054 mode for the data we have (op0's mode). It's tempting to make
7055 this a constant type, since we know it's only being stored once,
7056 but that can cause problems if we are taking the address of this
7057 COMPONENT_REF because the MEM of any reference via that address
7058 will have flags corresponding to the type, which will not
7059 necessarily be constant. */
7060 if (mode == BLKmode)
7063 = assign_stack_temp_for_type
7064 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7066 emit_move_insn (new, op0);
7067 op0 = copy_rtx (new);
7068 PUT_MODE (op0, BLKmode);
7069 set_mem_attributes (op0, exp, 1);
7075       /* If the result is BLKmode, use that to access the object
7076 	 now as well.  */
7077 if (mode == BLKmode)
7080 /* Get a reference to just this component. */
7081 if (modifier == EXPAND_CONST_ADDRESS
7082 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7083 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7085 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7087 if (op0 == orig_op0)
7088 op0 = copy_rtx (op0);
7090 set_mem_attributes (op0, exp, 0);
7091 if (REG_P (XEXP (op0, 0)))
7092 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7094 MEM_VOLATILE_P (op0) |= volatilep;
7095 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7096 || modifier == EXPAND_CONST_ADDRESS
7097 || modifier == EXPAND_INITIALIZER)
7099 else if (target == 0)
7100 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7102 convert_move (target, op0, unsignedp);
7107 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7110 /* Check for a built-in function. */
7111 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7112 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7114 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7116 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7117 == BUILT_IN_FRONTEND)
7118 return lang_hooks.expand_expr (exp, original_target,
7122 return expand_builtin (exp, target, subtarget, tmode, ignore);
7125 return expand_call (exp, target, ignore);
7127 case NON_LVALUE_EXPR:
7130 if (TREE_OPERAND (exp, 0) == error_mark_node)
7133 if (TREE_CODE (type) == UNION_TYPE)
7135 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7137 /* If both input and output are BLKmode, this conversion isn't doing
7138 	     anything except possibly changing memory attributes.  */
7139 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7141 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7144 result = copy_rtx (result);
7145 set_mem_attributes (result, exp, 0);
7151 if (TYPE_MODE (type) != BLKmode)
7152 target = gen_reg_rtx (TYPE_MODE (type));
7154 target = assign_temp (type, 0, 1, 1);
7158 /* Store data into beginning of memory target. */
7159 store_expr (TREE_OPERAND (exp, 0),
7160 adjust_address (target, TYPE_MODE (valtype), 0),
7161 modifier == EXPAND_STACK_PARM);
7165 gcc_assert (REG_P (target));
7167 /* Store this field into a union of the proper type. */
7168 store_field (target,
7169 MIN ((int_size_in_bytes (TREE_TYPE
7170 (TREE_OPERAND (exp, 0)))
7172 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7173 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7177 /* Return the entire union. */
7181 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7183 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7186 /* If the signedness of the conversion differs and OP0 is
7187 a promoted SUBREG, clear that indication since we now
7188 have to do the proper extension. */
7189 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7190 && GET_CODE (op0) == SUBREG)
7191 SUBREG_PROMOTED_VAR_P (op0) = 0;
7193 return REDUCE_BIT_FIELD (op0);
7196 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7197 op0 = REDUCE_BIT_FIELD (op0);
7198 if (GET_MODE (op0) == mode)
7201 /* If OP0 is a constant, just convert it into the proper mode. */
7202 if (CONSTANT_P (op0))
7204 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7205 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7207 if (modifier == EXPAND_INITIALIZER)
7208 return simplify_gen_subreg (mode, op0, inner_mode,
7209 subreg_lowpart_offset (mode,
7212 return convert_modes (mode, inner_mode, op0,
7213 TYPE_UNSIGNED (inner_type));
7216 if (modifier == EXPAND_INITIALIZER)
7217 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7221 convert_to_mode (mode, op0,
7222 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7224 convert_move (target, op0,
7225 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7228 case VIEW_CONVERT_EXPR:
7229 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7231 /* If the input and output modes are both the same, we are done.
7232 Otherwise, if neither mode is BLKmode and both are integral and within
7233 a word, we can use gen_lowpart. If neither is true, make sure the
7234 operand is in memory and convert the MEM to the new mode. */
7235 if (TYPE_MODE (type) == GET_MODE (op0))
7237 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7238 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7239 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7240 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7241 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7242 op0 = gen_lowpart (TYPE_MODE (type), op0);
7243 else if (!MEM_P (op0))
7245 /* If the operand is not a MEM, force it into memory. Since we
7246 	     are going to be changing the mode of the MEM, don't call
7247 force_const_mem for constants because we don't allow pool
7248 constants to change mode. */
7249 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7251 gcc_assert (!TREE_ADDRESSABLE (exp));
7253 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7255 = assign_stack_temp_for_type
7256 (TYPE_MODE (inner_type),
7257 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7259 emit_move_insn (target, op0);
7263 /* At this point, OP0 is in the correct mode. If the output type is such
7264 that the operand is known to be aligned, indicate that it is.
7265      Otherwise, we need only be concerned about alignment for non-BLKmode
7266      results.  */
7269 op0 = copy_rtx (op0);
7271 if (TYPE_ALIGN_OK (type))
7272 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7273 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7274 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7276 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7277 HOST_WIDE_INT temp_size
7278 = MAX (int_size_in_bytes (inner_type),
7279 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7280 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7281 temp_size, 0, type);
7282 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7284 gcc_assert (!TREE_ADDRESSABLE (exp));
7286 if (GET_MODE (op0) == BLKmode)
7287 emit_block_move (new_with_op0_mode, op0,
7288 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7289 (modifier == EXPAND_STACK_PARM
7290 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7292 emit_move_insn (new_with_op0_mode, op0);
7297 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7303 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7304 something else, make sure we add the register to the constant and
7305 then to the other thing. This case can occur during strength
7306 reduction and doing it this way will produce better code if the
7307 frame pointer or argument pointer is eliminated.
7309 fold-const.c will ensure that the constant is always in the inner
7310 PLUS_EXPR, so the only case we need to do anything about is if
7311 sp, ap, or fp is our second argument, in which case we must swap
7312 the innermost first argument and our second argument. */
7314 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7315 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7316 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7317 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7318 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7319 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7321 tree t = TREE_OPERAND (exp, 1);
7323 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7324 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7327 /* If the result is to be ptr_mode and we are adding an integer to
7328 something, we might be forming a constant. So try to use
7329 plus_constant. If it produces a sum and we can't accept it,
7330 use force_operand. This allows P = &ARR[const] to generate
7331 	 efficient code on machines where a SYMBOL_REF is not a valid
7332 	 address.
7334 If this is an EXPAND_SUM call, always return the sum. */
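/* For instance (illustrative; 'ARR' is a hypothetical name): expanding
   P = &ARR[3] for a 4-byte element type can combine the symbol and the
   scaled index into

     (plus (symbol_ref "ARR") (const_int 12))

   via plus_constant below; an EXPAND_SUM caller accepts that rtx
   as-is, while other callers get it legitimized through
   force_operand.  */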
7335 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7336 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7338 if (modifier == EXPAND_STACK_PARM)
7340 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7341 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7342 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7346 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7348 /* Use immed_double_const to ensure that the constant is
7349 truncated according to the mode of OP1, then sign extended
7350 to a HOST_WIDE_INT. Using the constant directly can result
7351 in non-canonical RTL in a 64x32 cross compile. */
7353 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7355 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7356 op1 = plus_constant (op1, INTVAL (constant_part));
7357 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7358 op1 = force_operand (op1, target);
7359 return REDUCE_BIT_FIELD (op1);
7362 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7363 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7364 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7368 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7369 (modifier == EXPAND_INITIALIZER
7370 ? EXPAND_INITIALIZER : EXPAND_SUM));
7371 if (! CONSTANT_P (op0))
7373 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7374 VOIDmode, modifier);
7375 /* Return a PLUS if modifier says it's OK. */
7376 if (modifier == EXPAND_SUM
7377 || modifier == EXPAND_INITIALIZER)
7378 return simplify_gen_binary (PLUS, mode, op0, op1);
7381 /* Use immed_double_const to ensure that the constant is
7382 truncated according to the mode of OP1, then sign extended
7383 to a HOST_WIDE_INT. Using the constant directly can result
7384 in non-canonical RTL in a 64x32 cross compile. */
7386 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7388 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7389 op0 = plus_constant (op0, INTVAL (constant_part));
7390 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7391 op0 = force_operand (op0, target);
7392 return REDUCE_BIT_FIELD (op0);
7396 /* No sense saving up arithmetic to be done
7397 if it's all in the wrong mode to form part of an address.
7398 	 And force_operand won't know whether to sign-extend or
7399 	 zero-extend.  */
7400 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7401 || mode != ptr_mode)
7403 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7404 subtarget, &op0, &op1, 0);
7405 if (op0 == const0_rtx)
7407 if (op1 == const0_rtx)
7412 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7413 subtarget, &op0, &op1, modifier);
7414 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7417 /* For initializers, we are allowed to return a MINUS of two
7418 	 symbolic constants.  Here we handle all cases when both operands
7419 	 are constant.  */
7420 /* Handle difference of two symbolic constants,
7421 for the sake of an initializer. */
7422 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7423 && really_constant_p (TREE_OPERAND (exp, 0))
7424 && really_constant_p (TREE_OPERAND (exp, 1)))
7426 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7427 NULL_RTX, &op0, &op1, modifier);
7429 /* If the last operand is a CONST_INT, use plus_constant of
7430 the negated constant. Else make the MINUS. */
7431 if (GET_CODE (op1) == CONST_INT)
7432 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7434 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7437 /* No sense saving up arithmetic to be done
7438 if it's all in the wrong mode to form part of an address.
7439 	 And force_operand won't know whether to sign-extend or
7440 	 zero-extend.  */
7441 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7442 || mode != ptr_mode)
7445 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7446 subtarget, &op0, &op1, modifier);
7448 /* Convert A - const to A + (-const). */
7449 if (GET_CODE (op1) == CONST_INT)
7451 op1 = negate_rtx (mode, op1);
7452 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7458 /* If first operand is constant, swap them.
7459 Thus the following special case checks need only
7460 check the second operand. */
7461 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7463 tree t1 = TREE_OPERAND (exp, 0);
7464 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7465 TREE_OPERAND (exp, 1) = t1;
7468 /* Attempt to return something suitable for generating an
7469 indexed address, for machines that support that. */
7471 if (modifier == EXPAND_SUM && mode == ptr_mode
7472 && host_integerp (TREE_OPERAND (exp, 1), 0))
7474 tree exp1 = TREE_OPERAND (exp, 1);
7476 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7480 op0 = force_operand (op0, NULL_RTX);
7482 op0 = copy_to_mode_reg (mode, op0);
7484 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7485 gen_int_mode (tree_low_cst (exp1, 0),
7486 TYPE_MODE (TREE_TYPE (exp1)))));
7489 if (modifier == EXPAND_STACK_PARM)
7492 /* Check for multiplying things that have been extended
7493 from a narrower type. If this machine supports multiplying
7494 in that narrower type with a result in the desired type,
7495 do it that way, and avoid the explicit type-conversion. */
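/* E.g. (illustrative): for 'int' = 'short' * 'short' on a machine that
   provides a mulhisi-style widening pattern, both operands are
   expanded in the narrow HImode and smul_widen_optab (umul_widen_optab
   for unsigned operands) produces the SImode product directly, instead
   of two explicit extensions followed by a full SImode multiply.  */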
7496 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7497 && TREE_CODE (type) == INTEGER_TYPE
7498 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7499 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7500 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7501 && int_fits_type_p (TREE_OPERAND (exp, 1),
7502 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7503 /* Don't use a widening multiply if a shift will do. */
7504 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7505 > HOST_BITS_PER_WIDE_INT)
7506 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7508 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7509 && (TYPE_PRECISION (TREE_TYPE
7510 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7511 == TYPE_PRECISION (TREE_TYPE
7513 (TREE_OPERAND (exp, 0), 0))))
7514 /* If both operands are extended, they must either both
7515 be zero-extended or both be sign-extended. */
7516 && (TYPE_UNSIGNED (TREE_TYPE
7517 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7518 == TYPE_UNSIGNED (TREE_TYPE
7520 (TREE_OPERAND (exp, 0), 0)))))))
7522 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7523 enum machine_mode innermode = TYPE_MODE (op0type);
7524 bool zextend_p = TYPE_UNSIGNED (op0type);
7525 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7526 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7528 if (mode == GET_MODE_WIDER_MODE (innermode))
7530 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7532 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7533 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7534 TREE_OPERAND (exp, 1),
7535 NULL_RTX, &op0, &op1, 0);
7537 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7538 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7539 NULL_RTX, &op0, &op1, 0);
7542 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7543 && innermode == word_mode)
7546 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7547 NULL_RTX, VOIDmode, 0);
7548 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7549 op1 = convert_modes (innermode, mode,
7550 expand_expr (TREE_OPERAND (exp, 1),
7551 NULL_RTX, VOIDmode, 0),
7554 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7555 NULL_RTX, VOIDmode, 0);
7556 temp = expand_binop (mode, other_optab, op0, op1, target,
7557 unsignedp, OPTAB_LIB_WIDEN);
7558 hipart = gen_highpart (innermode, temp);
7559 htem = expand_mult_highpart_adjust (innermode, hipart,
7563 emit_move_insn (hipart, htem);
7564 return REDUCE_BIT_FIELD (temp);
7568 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7569 subtarget, &op0, &op1, 0);
7570 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7572 case TRUNC_DIV_EXPR:
7573 case FLOOR_DIV_EXPR:
7575 case ROUND_DIV_EXPR:
7576 case EXACT_DIV_EXPR:
7577 if (modifier == EXPAND_STACK_PARM)
7579       /* Possible optimization: compute the dividend with EXPAND_SUM;
7580 	 then, if the divisor is constant, we can optimize the case
7581 	 where some terms of the dividend have coefficients divisible by it.  */
7582 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7583 subtarget, &op0, &op1, 0);
7584 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7587       /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
7588 	 saving an expensive divide.  If not, combine will rebuild the
7589 	 original computation.  */
7590 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7591 && TREE_CODE (type) == REAL_TYPE
7592 && !real_onep (TREE_OPERAND (exp, 0)))
7593 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7594 build2 (RDIV_EXPR, type,
7595 build_real (type, dconst1),
7596 TREE_OPERAND (exp, 1))),
7597 target, tmode, modifier);
7601 case TRUNC_MOD_EXPR:
7602 case FLOOR_MOD_EXPR:
7604 case ROUND_MOD_EXPR:
7605 if (modifier == EXPAND_STACK_PARM)
7607 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7608 subtarget, &op0, &op1, 0);
7609 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7611 case FIX_ROUND_EXPR:
7612 case FIX_FLOOR_EXPR:
7614 gcc_unreachable (); /* Not used for C. */
7616 case FIX_TRUNC_EXPR:
7617 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7618 if (target == 0 || modifier == EXPAND_STACK_PARM)
7619 target = gen_reg_rtx (mode);
7620 expand_fix (target, op0, unsignedp);
7624 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7625 if (target == 0 || modifier == EXPAND_STACK_PARM)
7626 target = gen_reg_rtx (mode);
7627 /* expand_float can't figure out what to do if FROM has VOIDmode.
7628 So give it the correct mode. With -O, cse will optimize this. */
7629 if (GET_MODE (op0) == VOIDmode)
7630 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7632 expand_float (target, op0,
7633 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7637 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7638 if (modifier == EXPAND_STACK_PARM)
7640 temp = expand_unop (mode,
7641 optab_for_tree_code (NEGATE_EXPR, type),
7644 return REDUCE_BIT_FIELD (temp);
7647 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7648 if (modifier == EXPAND_STACK_PARM)
7651 /* ABS_EXPR is not valid for complex arguments. */
7652 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7653 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7655 /* Unsigned abs is simply the operand. Testing here means we don't
7656 risk generating incorrect code below. */
7657 if (TYPE_UNSIGNED (type))
7660 return expand_abs (mode, op0, target, unsignedp,
7661 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7665 target = original_target;
7667 || modifier == EXPAND_STACK_PARM
7668 || (MEM_P (target) && MEM_VOLATILE_P (target))
7669 || GET_MODE (target) != mode
7671 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7672 target = gen_reg_rtx (mode);
7673 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7674 target, &op0, &op1, 0);
7676 /* First try to do it with a special MIN or MAX instruction.
7677 	 If that does not win, use a conditional jump to select the proper
7678 	 value.  */
7679 this_optab = optab_for_tree_code (code, type);
7680 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7685       /* At this point, a MEM target is no longer useful; we will get better
7686 	 code without it.  */
7689 target = gen_reg_rtx (mode);
7691 /* If op1 was placed in target, swap op0 and op1. */
7692 if (target != op0 && target == op1)
7700 emit_move_insn (target, op0);
7702 op0 = gen_label_rtx ();
7704 /* If this mode is an integer too wide to compare properly,
7705 compare word by word. Rely on cse to optimize constant cases. */
7706 if (GET_MODE_CLASS (mode) == MODE_INT
7707 && ! can_compare_p (GE, mode, ccp_jump))
7709 if (code == MAX_EXPR)
7710 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7713 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7718 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7719 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7721 emit_move_insn (target, op1);
7726 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7727 if (modifier == EXPAND_STACK_PARM)
7729 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7733 /* ??? Can optimize bitwise operations with one arg constant.
7734 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7735 and (a bitwise1 b) bitwise2 b (etc)
7736      but that is probably not worthwhile.  */
7738 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7739 boolean values when we want in all cases to compute both of them. In
7740 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7741 as actual zero-or-1 values and then bitwise anding. In cases where
7742 there cannot be any side effects, better code would be made by
7743 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7744 how to recognize those cases. */
7746 case TRUTH_AND_EXPR:
7747 code = BIT_AND_EXPR;
7752 code = BIT_IOR_EXPR;
7756 case TRUTH_XOR_EXPR:
7757 code = BIT_XOR_EXPR;
7765 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7767 if (modifier == EXPAND_STACK_PARM)
7769 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7770 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7773 /* Could determine the answer when only additive constants differ. Also,
7774 the addition of one can be handled by changing the condition. */
7781 case UNORDERED_EXPR:
7789 temp = do_store_flag (exp,
7790 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7791 tmode != VOIDmode ? tmode : mode, 0);
7795 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7796 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7798 && REG_P (original_target)
7799 && (GET_MODE (original_target)
7800 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7802 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7805 /* If temp is constant, we can just compute the result. */
7806 if (GET_CODE (temp) == CONST_INT)
7808 if (INTVAL (temp) != 0)
7809 emit_move_insn (target, const1_rtx);
7811 emit_move_insn (target, const0_rtx);
7816 if (temp != original_target)
7818 enum machine_mode mode1 = GET_MODE (temp);
7819 if (mode1 == VOIDmode)
7820 mode1 = tmode != VOIDmode ? tmode : mode;
7822 temp = copy_to_mode_reg (mode1, temp);
7825 op1 = gen_label_rtx ();
7826 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7827 GET_MODE (temp), unsignedp, op1);
7828 emit_move_insn (temp, const1_rtx);
7833 /* If no set-flag instruction, must generate a conditional store
7834 	 into a temporary variable.  Drop through and handle this
7835 	 like && and ||.  */
7839 || modifier == EXPAND_STACK_PARM
7840 || ! safe_from_p (target, exp, 1)
7841 /* Make sure we don't have a hard reg (such as function's return
7842 value) live across basic blocks, if not optimizing. */
7843 || (!optimize && REG_P (target)
7844 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7845 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7848 emit_move_insn (target, const0_rtx);
7850 op1 = gen_label_rtx ();
7851 jumpifnot (exp, op1);
7854 emit_move_insn (target, const1_rtx);
7857 return ignore ? const0_rtx : target;
7859 case TRUTH_NOT_EXPR:
7860 if (modifier == EXPAND_STACK_PARM)
7862 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7863 /* The parser is careful to generate TRUTH_NOT_EXPR
7864 only with operands that are always zero or one. */
7865 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7866 target, 1, OPTAB_LIB_WIDEN);
7870 case STATEMENT_LIST:
7872 tree_stmt_iterator iter;
7874 gcc_assert (ignore);
7876 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7877 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7882 /* If it's void, we don't need to worry about computing a value. */
7883 if (VOID_TYPE_P (TREE_TYPE (exp)))
7885 tree pred = TREE_OPERAND (exp, 0);
7886 tree then_ = TREE_OPERAND (exp, 1);
7887 tree else_ = TREE_OPERAND (exp, 2);
7889 gcc_assert (TREE_CODE (then_) == GOTO_EXPR
7890 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
7891 && TREE_CODE (else_) == GOTO_EXPR
7892 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
7894 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7895 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7898 /* Note that COND_EXPRs whose type is a structure or union
7899 are required to be constructed to contain assignments of
7900 a temporary variable, so that we can evaluate them here
7901 for side effect only. If type is void, we must do likewise. */
7903 gcc_assert (!TREE_ADDRESSABLE (type)
7905 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7906 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7908 /* If we are not to produce a result, we have no target. Otherwise,
7909 if a target was specified use it; it will not be used as an
7910 	 intermediate target unless it is safe.  If no target, use a
7911 	 temporary.  */
7913 if (modifier != EXPAND_STACK_PARM
7915 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7916 && GET_MODE (original_target) == mode
7917 #ifdef HAVE_conditional_move
7918 && (! can_conditionally_move_p (mode)
7919 || REG_P (original_target))
7921 && !MEM_P (original_target))
7922 temp = original_target;
7924 temp = assign_temp (type, 0, 0, 1);
7926 do_pending_stack_adjust ();
7928 op0 = gen_label_rtx ();
7929 op1 = gen_label_rtx ();
7930 jumpifnot (TREE_OPERAND (exp, 0), op0);
7931 store_expr (TREE_OPERAND (exp, 1), temp,
7932 modifier == EXPAND_STACK_PARM);
7934 emit_jump_insn (gen_jump (op1));
7937 store_expr (TREE_OPERAND (exp, 2), temp,
7938 modifier == EXPAND_STACK_PARM);
7945 target = expand_vec_cond_expr (exp, target);
7950 tree lhs = TREE_OPERAND (exp, 0);
7951 tree rhs = TREE_OPERAND (exp, 1);
7953 gcc_assert (ignore);
7955 /* Check for |= or &= of a bitfield of size one into another bitfield
7956 of size 1. In this case, (unless we need the result of the
7957 assignment) we can do this more efficiently with a
7958 test followed by an assignment, if necessary.
7960 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7961 	 things change so we do, this code should be enhanced to
7962 	 support it.  */
7963 if (TREE_CODE (lhs) == COMPONENT_REF
7964 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7965 || TREE_CODE (rhs) == BIT_AND_EXPR)
7966 && TREE_OPERAND (rhs, 0) == lhs
7967 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7968 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7969 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7971 rtx label = gen_label_rtx ();
7973 do_jump (TREE_OPERAND (rhs, 1),
7974 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7975 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7976 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7977 (TREE_CODE (rhs) == BIT_IOR_EXPR
7979 : integer_zero_node)));
7980 do_pending_stack_adjust ();
7985 expand_assignment (lhs, rhs);
7991 if (!TREE_OPERAND (exp, 0))
7992 expand_null_return ();
7994 expand_return (TREE_OPERAND (exp, 0));
7998 return expand_expr_addr_expr (exp, target, tmode, modifier);
8000 /* COMPLEX type for Extended Pascal & Fortran */
8003 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8006 /* Get the rtx code of the operands. */
8007 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8008 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8011 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8015 /* Move the real (op0) and imaginary (op1) parts to their location. */
8016 emit_move_insn (gen_realpart (mode, target), op0);
8017 emit_move_insn (gen_imagpart (mode, target), op1);
8019 insns = get_insns ();
8022 /* Complex construction should appear as a single unit. */
8023 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8024 each with a separate pseudo as destination.
8025 It's not correct for flow to treat them as a unit. */
8026 if (GET_CODE (target) != CONCAT)
8027 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8035 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8036 return gen_realpart (mode, op0);
8039 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8040 return gen_imagpart (mode, op0);
8043 expand_resx_expr (exp);
8046 case TRY_CATCH_EXPR:
8048 case EH_FILTER_EXPR:
8049 case TRY_FINALLY_EXPR:
8050 /* Lowered by tree-eh.c. */
8053 case WITH_CLEANUP_EXPR:
8054 case CLEANUP_POINT_EXPR:
8056 case CASE_LABEL_EXPR:
8062 case PREINCREMENT_EXPR:
8063 case PREDECREMENT_EXPR:
8064 case POSTINCREMENT_EXPR:
8065 case POSTDECREMENT_EXPR:
8068 case LABELED_BLOCK_EXPR:
8069 case EXIT_BLOCK_EXPR:
8070 case TRUTH_ANDIF_EXPR:
8071 case TRUTH_ORIF_EXPR:
8072 /* Lowered by gimplify.c. */
8076 return get_exception_pointer (cfun);
8079 return get_exception_filter (cfun);
8082       /* Function descriptors are not valid except as
8083 initialization constants, and should not be expanded. */
8091 expand_label (TREE_OPERAND (exp, 0));
8095 expand_asm_expr (exp);
8098 case WITH_SIZE_EXPR:
8099 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8100 have pulled out the size to use in whatever context it needed. */
8101 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8104 case REALIGN_LOAD_EXPR:
8106 tree oprnd0 = TREE_OPERAND (exp, 0);
8107 tree oprnd1 = TREE_OPERAND (exp, 1);
8108 tree oprnd2 = TREE_OPERAND (exp, 2);
8111 this_optab = optab_for_tree_code (code, type);
8112 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8113 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8114 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8123 return lang_hooks.expand_expr (exp, original_target, tmode,
8127 /* Here to do an ordinary binary operator. */
8129 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8130 subtarget, &op0, &op1, 0);
8132 this_optab = optab_for_tree_code (code, type);
8134 if (modifier == EXPAND_STACK_PARM)
8136 temp = expand_binop (mode, this_optab, op0, op1, target,
8137 unsignedp, OPTAB_LIB_WIDEN);
8139 return REDUCE_BIT_FIELD (temp);
8141 #undef REDUCE_BIT_FIELD

/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
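
/* Illustrative sketch, not GCC source (the demo_* names are hypothetical):
   the same reduction on plain 32-bit integers.  Unsigned values are masked;
   signed values are sign-extended by a left/right shift pair.  The signed
   variant assumes an arithmetic right shift and ignores the C-level
   undefinedness of left-shifting negative values, neither of which is an
   issue at the rtl level.  */
#if 0
static unsigned int
demo_reduce_unsigned (unsigned int val, int prec)
{
  unsigned int mask = prec < 32 ? (1u << prec) - 1 : ~0u;
  return val & mask;
}

static int
demo_reduce_signed (int val, int prec)
{
  int count = 32 - prec;
  return (val << count) >> count;
}
#endif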

/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
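
/* Illustrative sketch, not GCC source (demo_align and DEMO_ALIGN are
   hypothetical): the source idiom the recognizer above matches is the
   usual round-up-to-alignment computation, (-addr) & (align - 1), applied
   to the address of EXP.  */
#if 0
#include <stdint.h>
#define DEMO_ALIGN 64	/* must exceed BIGGEST_ALIGNMENT in bytes */

static char *
demo_align (char *exp)
{
  uintptr_t pad = (- (uintptr_t) exp) & (DEMO_ALIGN - 1);
  return exp + pad;	/* now aligned to DEMO_ALIGN bytes */
}
#endif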

/* Return the tree node if ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

static tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* The array must be read-only, non-volatile, and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If the variable is bigger than the string literal, OFFSET must be
	 constant and inside the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
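
/* Illustrative sketch, not GCC source (the demo_* names are hypothetical):
   given a definition like the one below, string_constant resolves
   demo_buf + 1 to the STRING_CST "hello" at offset 1, which is what lets
   builtins such as strlen be folded at compile time.  */
#if 0
#include <string.h>

static const char demo_buf[6] = "hello";

static size_t
demo_folded_strlen (void)
{
  return strlen (demo_buf + 1);	/* foldable to 4 */
}
#endif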

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);
  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }
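  /* Illustrative sketch, not GCC source (the demo_* names are hypothetical):
     the signed conversions against 1 and -1 performed in the switch above.  */
#if 0
static int demo_lt_1 (int x)  { return x <= 0; }	/* x < 1   */
static int demo_le_m1 (int x) { return x < 0; }		/* x <= -1 */
static int demo_gt_m1 (int x) { return x >= 0; }	/* x > -1  */
static int demo_ge_1 (int x)  { return x > 0; }		/* x >= 1  */
#endif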
  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
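  /* Illustrative sketch, not GCC source (the demo_* names are hypothetical):
     the rewrite fold_single_bit_test performs, shown for bit 3.  */
#if 0
static int
demo_bit_ne (unsigned int x)
{
  /* (x & 8) != 0 becomes:  */
  return (x >> 3) & 1;
}

static int
demo_bit_eq (unsigned int x)
{
  /* (x & 8) == 0 is the same shift, xor'd with 1:  */
  return ((x >> 3) & 1) ^ 1;
}
#endif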
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }
  /* If this failed, we have to do this with set/compare/jump/set code.  */

  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
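
/* Illustrative sketch, not GCC source (demo_store_flag_lt is hypothetical):
   the set/jump/set fallback above, written as C control flow for an LT
   comparison with INVERT clear.  */
#if 0
static int
demo_store_flag_lt (int a, int b)
{
  int target = 1;	/* emit_move_insn (target, const1_rtx) */
  if (a < b)		/* emit_jump_insn via bcc_gen_fctn */
    goto done;
  target = 0;		/* emit_move_insn (target, const0_rtx) */
 done:			/* emit_label (label) */
  return target;
}
#endif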

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
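
/* Illustrative sketch, not GCC source (demo_narrow_index is hypothetical):
   why the wider-than-SImode index may be truncated above.  Once
   INDEX - MINVAL has been range-checked in the original mode, every value
   that survives fits in 32 bits, so the truncation cannot change it.  */
#if 0
#include <stdint.h>

static int32_t
demo_narrow_index (int64_t index, int64_t minval, uint64_t range,
		   int32_t dflt)
{
  uint64_t rel = (uint64_t) index - (uint64_t) minval;
  if (rel > range)	/* emit_cmp_and_jump_insns (rangertx, index, LTU, ...) */
    return dflt;	/* default_label */
  return (int32_t) rel;	/* now safe to truncate */
}
#endif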

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
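
/* Illustrative sketch, not GCC source (demo_dispatch is hypothetical):
   the dispatch pattern emitted above, written with the GNU C computed
   goto extension.  The rtl form scales INDEX by the table entry size and
   adds the table label, which is exactly what indexing the table of label
   addresses does here.  */
#if 0
static int
demo_dispatch (unsigned int idx)
{
  static const void *const table[] = { &&case0, &&case1, &&case2 };

  if (idx > 2)
    goto deflt;		/* emit_cmp_and_jump_insns (..., GTU, ...) */
  goto *table[idx];	/* the tablejump */

 case0: return 10;
 case1: return 11;
 case2: return 12;
 deflt: return -1;
}
#endif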

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
			     convert (index_type, index_expr),
			     convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but do have V2DI; however, this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"