/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
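
/* For illustration (a sketch, not part of the original source): on a
   STACK_GROWS_DOWNWARD target a push of one SImode word therefore
   expands to RTL of the shape

       (set (mem:SI (pre_dec:SI (reg:SI sp))) ...)

   while a PRE_INC target would advance the stack pointer upward
   instead.  */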
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
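
/* Worked example (hypothetical figures): with MOVE_MAX_PIECES == 8 and
   MOVE_RATIO == 15, copying a 32-byte block that is 8-byte aligned
   takes four DImode moves, so MOVE_BY_PIECES_P holds and the copy is
   expanded inline instead of via a libcall.  CLEAR_BY_PIECES_P and
   STORE_BY_PIECES_P apply the same insn-count test against CLEAR_RATIO
   and MOVE_RATIO respectively.  */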
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  int regno;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  if (to_real != from_real)
    abort ();

  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;
      else
	abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab
			  ? gen_rtx_FLOAT_TRUNCATE (to_mode, from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
466 /* Handle expanding beyond a word. */
467 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
468 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
475 enum machine_mode lowpart_mode;
476 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
478 /* Try converting directly if the insn is supported. */
479 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
482 /* If FROM is a SUBREG, put it into a register. Do this
483 so that we always generate the same set of insns for
484 better cse'ing; if an intermediate assignment occurred,
485 we won't be doing the operation directly on the SUBREG. */
486 if (optimize > 0 && GET_CODE (from) == SUBREG)
487 from = force_reg (from_mode, from);
488 emit_unop_insn (code, to, from, equiv_code);
491 /* Next, try converting via full word. */
492 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
493 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
494 != CODE_FOR_nothing))
498 if (reg_overlap_mentioned_p (to, from))
499 from = force_reg (from_mode, from);
500 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
502 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
503 emit_unop_insn (code, to,
504 gen_lowpart (word_mode, to), equiv_code);
508 /* No special multiword conversion insn; do it by hand. */
511 /* Since we will turn this into a no conflict block, we must ensure
512 that the source does not overlap the target. */
514 if (reg_overlap_mentioned_p (to, from))
515 from = force_reg (from_mode, from);
517 /* Get a copy of FROM widened to a word, if necessary. */
518 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
519 lowpart_mode = word_mode;
521 lowpart_mode = from_mode;
523 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
525 lowpart = gen_lowpart (lowpart_mode, to);
526 emit_move_insn (lowpart, lowfrom);
528 /* Compute the value to put in each remaining word. */
530 fill_value = const0_rtx;
535 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
536 && STORE_FLAG_VALUE == -1)
538 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
540 fill_value = gen_reg_rtx (word_mode);
541 emit_insn (gen_slt (fill_value));
547 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
548 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
550 fill_value = convert_to_mode (word_mode, fill_value, 1);
554 /* Fill the remaining words. */
555 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
557 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
558 rtx subword = operand_subword (to, index, 1, to_mode);
563 if (fill_value != subword)
564 emit_move_insn (subword, fill_value);
567 insns = get_insns ();
570 emit_no_conflict_block (insns, to, from, NULL_RTX,
571 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
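
/* Typical use of convert_move (an illustrative sketch): widen an
   SImode pseudo SRC into a fresh DImode pseudo with zero extension,
   i.e. UNSIGNEDP == 1:

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 1);

   Depending on the target this emits one extension insn, a multiword
   expansion, or a libcall.  */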
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
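
/* E.g. "rtx wide = convert_to_mode (DImode, narrow, 0);" yields a
   sign-extended DImode value, reusing part of NARROW in place when
   possible and converting through a new pseudo otherwise
   (illustrative use).  */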
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from an integer constant into mode is always equivalent to
     a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
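
/* E.g. with a 64-bit HOST_WIDE_INT and MOVE_MAX_PIECES of 16,
   STORE_MAX_PIECES is MIN (16, 16) == 16 bytes; a 32-bit
   HOST_WIDE_INT would cap it at 8 (hypothetical figures).  */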
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
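
/* For example (illustrative): for l == 7 with ample alignment and
   integer modes of sizes 8/4/2/1, the greedy walk above counts one
   SImode, one HImode and one QImode move, 7 = 4 + 2 + 1, i.e. three
   insns.  */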
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
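
/* A typical call (illustrative): copy 32 bytes between two BLKmode
   MEMs, letting the expander pick the mechanism:

       emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);  */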
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
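
/* The RTL emitted above corresponds roughly to this C sketch, with
   one QImode move per iteration:

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];
       iter += 1;
     cmp:
       if (iter < size) goto top;  */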
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat, last;
#endif

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
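
/* For example (illustrative), a 16-byte value split across two DImode
   registers might be represented as

       (parallel [(expr_list (reg:DI 4) (const_int 0))
		  (expr_list (reg:DI 5) (const_int 8))])

   where each offset gives the byte position of that register's piece
   within the whole value; gen_group_rtx clones such a group with
   fresh pseudos in place of the hard registers.  */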
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE,
					       shift, 0), tmps[i], 0);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_cst (NULL_TREE,
						     shift, 0), tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else if (bytepos == 0 && XVECLEN (src, 0))
	    {
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
	  ? ! BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == padding_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == padding_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
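
/* Padding example (illustrative): for a 6-byte structure on a
   big-endian target with 4-byte words, bytes % UNITS_PER_WORD == 2,
   so padding_correction = 32 - 2 * 8 = 16, and the copy skips the 16
   most significant bits of SRCREG.  */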
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  if (!REG_P (reg)
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
	use_reg (call_fusage, reg);
    }
}
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (unsigned HOST_WIDE_INT len,
		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
		     void *constfundata, unsigned int align)
{
  unsigned HOST_WIDE_INT l;
  unsigned int max_size;
  HOST_WIDE_INT offset = 0;
  enum machine_mode mode, tmode;
  enum insn_code icode;
  int reverse;
  rtx cst;

  if (! STORE_BY_PIECES_P (len, align))
    return 0;

  tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* We would first store what we can in the largest integer mode, then go to
     successively smaller modes.  */

  for (reverse = 0;
       reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
       reverse++)
    {
      l = len;
      mode = VOIDmode;
      max_size = STORE_MAX_PIECES + 1;
      while (max_size > 1)
	{
	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;

	  icode = mov_optab->handlers[(int) mode].insn_code;
	  if (icode != CODE_FOR_nothing
	      && align >= GET_MODE_ALIGNMENT (mode))
	    {
	      unsigned int size = GET_MODE_SIZE (mode);

	      while (l >= size)
		{
		  if (reverse)
		    offset -= size;

		  cst = (*constfun) (constfundata, offset, mode);
		  if (!LEGITIMATE_CONSTANT_P (cst))
		    return 0;

		  if (!reverse)
		    offset += size;

		  l -= size;
		}
	    }

	  max_size = GET_MODE_SIZE (mode);
	}

      /* The code above should have handled everything.  */
      if (l != 0)
	abort ();
    }

  return 1;
}
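
/* An illustrative CONSTFUN (a sketch, not part of this file) that
   yields an all-ones pattern for every piece:

       static rtx
       all_ones_piece (void *data ATTRIBUTE_UNUSED,
		       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		       enum machine_mode mode)
       {
	 return gen_int_mode (-1, mode);
       }

   clear_by_pieces_1 below plays the same role for zero-clearing.  */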
2102 /* Generate several move instructions to store LEN bytes generated by
2103 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2104 pointer which will be passed as argument in every CONSTFUN call.
2105 ALIGN is maximum alignment we can assume.
2106 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2107 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2111 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2112 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2113 void *constfundata, unsigned int align, int endp)
2115 struct store_by_pieces data;
2124 if (! STORE_BY_PIECES_P (len, align))
2126 data.constfun = constfun;
2127 data.constfundata = constfundata;
2130 store_by_pieces_1 (&data, align);
2141 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2142 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2144 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2147 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2154 to1 = adjust_address (data.to, QImode, data.offset);
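/* Illustrative sketch (not part of the original source): the ENDP
   convention above, shown on plain memory.  ENDP 0 returns the start,
   1 the byte just past the end (mempcpy-style), 2 the last byte written
   (stpcpy-style).  The helper name is invented for illustration.  */

static unsigned char *
example_endp_result (unsigned char *to, unsigned long len, int endp)
{
  if (endp == 0)
    return to;              /* ENDP 0: the destination itself */
  if (endp == 1)
    return to + len;        /* ENDP 1: one past the last byte, ala mempcpy */
  return to + len - 1;      /* ENDP 2: the last byte written, ala stpcpy */
}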
2162 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2163 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2166 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2168 struct store_by_pieces data;
2173 data.constfun = clear_by_pieces_1;
2174 data.constfundata = NULL;
2177 store_by_pieces_1 (&data, align);
2180 /* Callback routine for clear_by_pieces.
2181 Return const0_rtx unconditionally. */
2184 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2185 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2186 enum machine_mode mode ATTRIBUTE_UNUSED)
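/* Illustrative sketch (not part of the original source): store_by_pieces
   is parameterized by a callback producing the constant for each
   (offset, mode) chunk, and clear_by_pieces simply plugs in a callback
   that always answers zero, as clear_by_pieces_1 does above.  A
   stand-alone analogue over bytes; all names below are invented.  */

typedef unsigned char (*example_constfun) (void *data, unsigned long offset);

static void
example_store_by_pieces (unsigned char *to, unsigned long len,
			 example_constfun constfun, void *data)
{
  unsigned long i;

  /* Ask the callback for each piece; the real code does this per
     machine-mode chunk rather than per byte.  */
  for (i = 0; i < len; i++)
    to[i] = constfun (data, i);
}

static unsigned char
example_clear_cb (void *data, unsigned long offset)
{
  (void) data; (void) offset;
  return 0;                       /* mirrors clear_by_pieces_1 */
}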
2191 /* Subroutine of clear_by_pieces and store_by_pieces.
2192 Generate several move instructions to store LEN bytes of block TO. (A MEM
2193 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2196 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2197 unsigned int align ATTRIBUTE_UNUSED)
2199 rtx to_addr = XEXP (data->to, 0);
2200 unsigned int max_size = STORE_MAX_PIECES + 1;
2201 enum machine_mode mode = VOIDmode, tmode;
2202 enum insn_code icode;
2205 data->to_addr = to_addr;
2207 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2208 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2210 data->explicit_inc_to = 0;
2212 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2214 data->offset = data->len;
2216 /* If storing requires more than two move insns,
2217 copy addresses to registers (to make displacements shorter)
2218 and use post-increment if available. */
2219 if (!data->autinc_to
2220 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2222 /* Determine the main mode we'll be using. */
2223 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2224 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2225 if (GET_MODE_SIZE (tmode) < max_size)
2228 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2230 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2231 data->autinc_to = 1;
2232 data->explicit_inc_to = -1;
2235 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2236 && ! data->autinc_to)
2238 data->to_addr = copy_addr_to_reg (to_addr);
2239 data->autinc_to = 1;
2240 data->explicit_inc_to = 1;
2243 if (!data->autinc_to && CONSTANT_P (to_addr))
2244 data->to_addr = copy_addr_to_reg (to_addr);
2247 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2248 if (align >= GET_MODE_ALIGNMENT (tmode))
2249 align = GET_MODE_ALIGNMENT (tmode);
2252 enum machine_mode xmode;
2254 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2256 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2257 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2258 || SLOW_UNALIGNED_ACCESS (tmode, align))
2261 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2264 /* First store what we can in the largest integer mode, then go to
2265 successively smaller modes. */
2267 while (max_size > 1)
2269 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2270 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2271 if (GET_MODE_SIZE (tmode) < max_size)
2274 if (mode == VOIDmode)
2277 icode = mov_optab->handlers[(int) mode].insn_code;
2278 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2279 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2281 max_size = GET_MODE_SIZE (mode);
2284 /* The code above should have handled everything. */
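/* Illustrative sketch (not part of the original source): when the target
   has pre-decrement or post-increment addressing, the code above copies
   the destination address into a register and lets the address step
   implicitly.  The same two loop shapes in plain C, with invented
   names:  */

static void
example_fill_forward (unsigned char *p, unsigned long len, unsigned char v)
{
  while (len--)
    *p++ = v;        /* post-increment: explicit_inc_to > 0 */
}

static void
example_fill_reverse (unsigned char *end, unsigned long len, unsigned char v)
{
  while (len--)
    *--end = v;      /* pre-decrement: explicit_inc_to < 0, data->reverse */
}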
2289 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2290 with move instructions for mode MODE. GENFUN is the gen_... function
2291 to make a move insn for that mode. DATA has all the other info. */
2294 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2295 struct store_by_pieces *data)
2297 unsigned int size = GET_MODE_SIZE (mode);
2300 while (data->len >= size)
2303 data->offset -= size;
2305 if (data->autinc_to)
2306 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2309 to1 = adjust_address (data->to, mode, data->offset);
2311 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2312 emit_insn (gen_add2_insn (data->to_addr,
2313 GEN_INT (-(HOST_WIDE_INT) size)));
2315 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2316 emit_insn ((*genfun) (to1, cst));
2318 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2319 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2321 if (! data->reverse)
2322 data->offset += size;
2328 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2329 its length in bytes. */
2332 clear_storage (rtx object, rtx size)
2335 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2336 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2338 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2339 just move a zero. Otherwise, do this a piece at a time. */
2340 if (GET_MODE (object) != BLKmode
2341 && GET_CODE (size) == CONST_INT
2342 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2343 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2346 if (size == const0_rtx)
2348 else if (GET_CODE (size) == CONST_INT
2349 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2350 clear_by_pieces (object, INTVAL (size), align);
2351 else if (clear_storage_via_clrmem (object, size, align))
2354 retval = clear_storage_via_libcall (object, size);
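/* Illustrative sketch (not part of the original source): the dispatch in
   clear_storage is a cost ladder -- nothing for size zero, inline stores
   for small constant sizes (CLEAR_BY_PIECES_P), a target clear pattern
   when one matches, and finally a real call to memset.  The threshold
   and helper name below are invented stand-ins.  */

#include <string.h>

static void
example_clear (void *object, unsigned long size)
{
  enum { EXAMPLE_PIECES_LIMIT = 64 };   /* stand-in for CLEAR_BY_PIECES_P */

  if (size == 0)
    return;                              /* nothing to do */
  else if (size <= EXAMPLE_PIECES_LIMIT)
    {
      unsigned char *p = object;         /* clear by pieces, inline */
      while (size--)
	*p++ = 0;
    }
  else
    memset (object, 0, size);            /* fall back to the libcall */
}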
2360 /* A subroutine of clear_storage. Expand a clrmem pattern;
2361 return true if successful. */
2364 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2366 /* Try the most limited insn first, because there's no point
2367 including more than one in the machine description unless
2368 the more limited one has some advantage. */
2370 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2371 enum machine_mode mode;
2373 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2374 mode = GET_MODE_WIDER_MODE (mode))
2376 enum insn_code code = clrmem_optab[(int) mode];
2377 insn_operand_predicate_fn pred;
2379 if (code != CODE_FOR_nothing
2380 /* We don't need MODE to be narrower than
2381 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2382 the mode mask, as it is returned by the macro, it will
2383 definitely be less than the actual mode mask. */
2384 && ((GET_CODE (size) == CONST_INT
2385 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2386 <= (GET_MODE_MASK (mode) >> 1)))
2387 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2388 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2389 || (*pred) (object, BLKmode))
2390 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2391 || (*pred) (opalign, VOIDmode)))
2394 rtx last = get_last_insn ();
2397 op1 = convert_to_mode (mode, size, 1);
2398 pred = insn_data[(int) code].operand[1].predicate;
2399 if (pred != 0 && ! (*pred) (op1, mode))
2400 op1 = copy_to_mode_reg (mode, op1);
2402 pat = GEN_FCN ((int) code) (object, op1, opalign);
2409 delete_insns_since (last);
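/* Illustrative sketch (not part of the original source): the loop above
   is a try-each-variant-and-roll-back pattern -- attempt the insn for
   each mode, and on failure delete everything emitted since LAST and
   move on to the next mode.  A stand-alone analogue with invented
   names:  */

static int
example_try_variants (int (*try_one) (int variant, void *ctx),
		      int n_variants, void *ctx)
{
  int v;

  for (v = 0; v < n_variants; v++)
    {
      /* A real caller would checkpoint state here (get_last_insn)...  */
      if (try_one (v, ctx))
	return 1;       /* success: keep what this variant emitted */
      /* ...and roll back here on failure (delete_insns_since).  */
    }
  return 0;             /* no variant applied; caller falls back */
}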
2416 /* A subroutine of clear_storage. Expand a call to memset.
2417 Return the return value of memset, 0 otherwise. */
2420 clear_storage_via_libcall (rtx object, rtx size)
2422 tree call_expr, arg_list, fn, object_tree, size_tree;
2423 enum machine_mode size_mode;
2426 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2427 refer to those pseudos from tree nodes and use them later.  */
2429 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2431 size_mode = TYPE_MODE (sizetype);
2432 size = convert_to_mode (size_mode, size, 1);
2433 size = copy_to_mode_reg (size_mode, size);
2435 /* It is incorrect to use the libcall calling conventions to call
2436 memset in this context. This could be a user call to memset and
2437 the user may wish to examine the return value from memset. For
2438 targets where libcalls and normal calls have different conventions
2439 for returning pointers, we could end up generating incorrect code. */
2441 object_tree = make_tree (ptr_type_node, object);
2442 size_tree = make_tree (sizetype, size);
2444 fn = clear_storage_libcall_fn (true);
2445 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2446 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2447 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2449 /* Now we have to build up the CALL_EXPR itself. */
2450 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2451 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2452 call_expr, arg_list, NULL_TREE);
2454 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2456 /* If we are initializing a readonly value, show the above call
2457 clobbered it. Otherwise, a load from it may erroneously be
2458 hoisted from a loop. */
2459 if (RTX_UNCHANGING_P (object))
2460 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
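/* Illustrative sketch (not part of the original source): the tree_cons
   calls above push the arguments in reverse, so the finished list reads
   (object, 0, size) -- the memset argument order.  A stand-alone cons
   analogue, with caller-provided nodes and invented names:  */

struct example_cons
{
  const char *value;
  struct example_cons *next;
};

static struct example_cons *
example_push (struct example_cons *node, const char *value,
	      struct example_cons *rest)
{
  node->value = value;   /* new head of the list */
  node->next = rest;     /* previously consed arguments follow */
  return node;
}

/* Pushing "size", then "0", then "object" yields the list
   object -> 0 -> size, matching memset (object, 0, size).  */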
2465 /* A subroutine of clear_storage_via_libcall. Create the tree node
2466 for the function we use for block clears. The first time FOR_CALL
2467 is true, we call assemble_external. */
2469 static GTY(()) tree block_clear_fn;
2472 init_block_clear_fn (const char *asmspec)
2474 if (!block_clear_fn)
2478 fn = get_identifier ("memset");
2479 args = build_function_type_list (ptr_type_node, ptr_type_node,
2480 integer_type_node, sizetype,
2483 fn = build_decl (FUNCTION_DECL, fn, args);
2484 DECL_EXTERNAL (fn) = 1;
2485 TREE_PUBLIC (fn) = 1;
2486 DECL_ARTIFICIAL (fn) = 1;
2487 TREE_NOTHROW (fn) = 1;
2489 block_clear_fn = fn;
2493 set_user_assembler_name (block_clear_fn, asmspec);
2497 clear_storage_libcall_fn (int for_call)
2499 static bool emitted_extern;
2501 if (!block_clear_fn)
2502 init_block_clear_fn (NULL);
2504 if (for_call && !emitted_extern)
2506 emitted_extern = true;
2507 make_decl_rtl (block_clear_fn);
2508 assemble_external (block_clear_fn);
2511 return block_clear_fn;
2514 /* Generate code to copy Y into X.
2515 Both Y and X must have the same mode, except that
2516 Y can be a constant with VOIDmode.
2517 This mode cannot be BLKmode; use emit_block_move for that.
2519 Return the last instruction emitted. */
2522 emit_move_insn (rtx x, rtx y)
2524 enum machine_mode mode = GET_MODE (x);
2525 rtx y_cst = NULL_RTX;
2528 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2534 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2535 && (last_insn = compress_float_constant (x, y)))
2540 if (!LEGITIMATE_CONSTANT_P (y))
2542 y = force_const_mem (mode, y);
2544 /* If the target's cannot_force_const_mem prevented the spill,
2545 assume that the target's move expanders will also take care
2546 of the non-legitimate constant. */
2552 /* If X or Y are memory references, verify that their addresses are valid for the machine.  */
2555 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2556 && ! push_operand (x, GET_MODE (x)))
2558 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2559 x = validize_mem (x);
2562 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2564 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2565 y = validize_mem (y);
2567 if (mode == BLKmode)
2570 last_insn = emit_move_insn_1 (x, y);
2572 if (y_cst && REG_P (x)
2573 && (set = single_set (last_insn)) != NULL_RTX
2574 && SET_DEST (set) == x
2575 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2576 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2581 /* Low level part of emit_move_insn.
2582 Called just like emit_move_insn, but assumes X and Y
2583 are basically valid. */
2586 emit_move_insn_1 (rtx x, rtx y)
2588 enum machine_mode mode = GET_MODE (x);
2589 enum machine_mode submode;
2590 enum mode_class class = GET_MODE_CLASS (mode);
2592 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2595 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2597 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2599 /* Expand complex moves by moving real part and imag part, if possible. */
2600 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2601 && BLKmode != (submode = GET_MODE_INNER (mode))
2602 && (mov_optab->handlers[(int) submode].insn_code
2603 != CODE_FOR_nothing))
2605 /* Don't split destination if it is a stack push. */
2606 int stack = push_operand (x, GET_MODE (x));
2608 #ifdef PUSH_ROUNDING
2609 /* In case we output to the stack, but the size is smaller than the
2610 machine can push exactly, we need to use move instructions. */
2612 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2613 != GET_MODE_SIZE (submode)))
2616 HOST_WIDE_INT offset1, offset2;
2618 /* Do not use anti_adjust_stack, since we don't want to update
2619 stack_pointer_delta. */
2620 temp = expand_binop (Pmode,
2621 #ifdef STACK_GROWS_DOWNWARD
2629 (GET_MODE_SIZE (GET_MODE (x)))),
2630 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2632 if (temp != stack_pointer_rtx)
2633 emit_move_insn (stack_pointer_rtx, temp);
2635 #ifdef STACK_GROWS_DOWNWARD
2637 offset2 = GET_MODE_SIZE (submode);
2639 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2640 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2641 + GET_MODE_SIZE (submode));
2644 emit_move_insn (change_address (x, submode,
2645 gen_rtx_PLUS (Pmode,
2647 GEN_INT (offset1))),
2648 gen_realpart (submode, y));
2649 emit_move_insn (change_address (x, submode,
2650 gen_rtx_PLUS (Pmode,
2652 GEN_INT (offset2))),
2653 gen_imagpart (submode, y));
2657 /* If this is a stack push, push the highpart first, so it
2658 will end up in the argument order.
2660 In that case, change_address is used only to convert
2661 the mode, not to change the address. */
2664 /* Note that the real part always precedes the imag part in memory
2665 regardless of machine's endianness. */
2666 #ifdef STACK_GROWS_DOWNWARD
2667 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2668 gen_imagpart (submode, y));
2669 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2670 gen_realpart (submode, y));
2672 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2673 gen_realpart (submode, y));
2674 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2675 gen_imagpart (submode, y));
2680 rtx realpart_x, realpart_y;
2681 rtx imagpart_x, imagpart_y;
2683 /* If this is a complex value with each part being smaller than a
2684 word, the usual calling sequence will likely pack the pieces into
2685 a single register. Unfortunately, SUBREG of hard registers only
2686 deals in terms of words, so we have a problem converting input
2687 arguments to the CONCAT of two registers that is used elsewhere
2688 for complex values. If this is before reload, we can copy it into
2689 memory and reload. FIXME, we should see about using extract and
2690 insert on integer registers, but complex short and complex char
2691 variables should be rarely used. */
2692 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2693 && (reload_in_progress | reload_completed) == 0)
2696 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2698 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2700 if (packed_dest_p || packed_src_p)
2702 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2703 ? MODE_FLOAT : MODE_INT);
2705 enum machine_mode reg_mode
2706 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2708 if (reg_mode != BLKmode)
2710 rtx mem = assign_stack_temp (reg_mode,
2711 GET_MODE_SIZE (mode), 0);
2712 rtx cmem = adjust_address (mem, mode, 0);
2716 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2718 emit_move_insn_1 (cmem, y);
2719 return emit_move_insn_1 (sreg, mem);
2723 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2725 emit_move_insn_1 (mem, sreg);
2726 return emit_move_insn_1 (x, cmem);
2732 realpart_x = gen_realpart (submode, x);
2733 realpart_y = gen_realpart (submode, y);
2734 imagpart_x = gen_imagpart (submode, x);
2735 imagpart_y = gen_imagpart (submode, y);
2737 /* Show the output dies here. This is necessary for SUBREGs
2738 of pseudos since we cannot track their lifetimes correctly;
2739 hard regs shouldn't appear here except as return values.
2740 We never want to emit such a clobber after reload. */
2742 && ! (reload_in_progress || reload_completed)
2743 && (GET_CODE (realpart_x) == SUBREG
2744 || GET_CODE (imagpart_x) == SUBREG))
2745 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2747 emit_move_insn (realpart_x, realpart_y);
2748 emit_move_insn (imagpart_x, imagpart_y);
2751 return get_last_insn ();
2754 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2755 find a mode to do it in. If we have a movcc, use it. Otherwise,
2756 find the MODE_INT mode of the same width. */
2757 else if (GET_MODE_CLASS (mode) == MODE_CC
2758 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2760 enum insn_code insn_code;
2761 enum machine_mode tmode = VOIDmode;
2765 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2768 for (tmode = QImode; tmode != VOIDmode;
2769 tmode = GET_MODE_WIDER_MODE (tmode))
2770 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2773 if (tmode == VOIDmode)
2776 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2777 may call change_address which is not appropriate if we were
2778 called when a reload was in progress. We don't have to worry
2779 about changing the address since the size in bytes is supposed to
2780 be the same. Copy the MEM to change the mode and move any
2781 substitutions from the old MEM to the new one. */
2783 if (reload_in_progress)
2785 x = gen_lowpart_common (tmode, x1);
2786 if (x == 0 && MEM_P (x1))
2788 x = adjust_address_nv (x1, tmode, 0);
2789 copy_replacements (x1, x);
2792 y = gen_lowpart_common (tmode, y1);
2793 if (y == 0 && MEM_P (y1))
2795 y = adjust_address_nv (y1, tmode, 0);
2796 copy_replacements (y1, y);
2801 x = gen_lowpart (tmode, x);
2802 y = gen_lowpart (tmode, y);
2805 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2806 return emit_insn (GEN_FCN (insn_code) (x, y));
2809 /* Try using a move pattern for the corresponding integer mode. This is
2810 only safe when simplify_subreg can convert MODE constants into integer
2811 constants. At present, it can only do this reliably if the value
2812 fits within a HOST_WIDE_INT. */
2813 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2814 && (submode = int_mode_for_mode (mode)) != BLKmode
2815 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2816 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2817 (simplify_gen_subreg (submode, x, mode, 0),
2818 simplify_gen_subreg (submode, y, mode, 0)));
2820 /* This will handle any multi-word or full-word mode that lacks a move_insn
2821 pattern. However, you will get better code if you define such patterns,
2822 even if they must turn into multiple assembler instructions. */
2823 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
2830 #ifdef PUSH_ROUNDING
2832 /* If X is a push on the stack, do the push now and replace
2833 X with a reference to the stack pointer. */
2834 if (push_operand (x, GET_MODE (x)))
2839 /* Do not use anti_adjust_stack, since we don't want to update
2840 stack_pointer_delta. */
2841 temp = expand_binop (Pmode,
2842 #ifdef STACK_GROWS_DOWNWARD
2850 (GET_MODE_SIZE (GET_MODE (x)))),
2851 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2853 if (temp != stack_pointer_rtx)
2854 emit_move_insn (stack_pointer_rtx, temp);
2856 code = GET_CODE (XEXP (x, 0));
2858 /* Just hope that small offsets off SP are OK. */
2859 if (code == POST_INC)
2860 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2861 GEN_INT (-((HOST_WIDE_INT)
2862 GET_MODE_SIZE (GET_MODE (x)))));
2863 else if (code == POST_DEC)
2864 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2865 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2867 temp = stack_pointer_rtx;
2869 x = change_address (x, VOIDmode, temp);
2873 /* If we are in reload, see if either operand is a MEM whose address
2874 is scheduled for replacement. */
2875 if (reload_in_progress && MEM_P (x)
2876 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2877 x = replace_equiv_address_nv (x, inner);
2878 if (reload_in_progress && MEM_P (y)
2879 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2880 y = replace_equiv_address_nv (y, inner);
2886 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2889 rtx xpart = operand_subword (x, i, 1, mode);
2890 rtx ypart = operand_subword (y, i, 1, mode);
2892 /* If we can't get a part of Y, put Y into memory if it is a
2893 constant. Otherwise, force it into a register. If we still
2894 can't get a part of Y, abort. */
2895 if (ypart == 0 && CONSTANT_P (y))
2897 y = force_const_mem (mode, y);
2898 ypart = operand_subword (y, i, 1, mode);
2900 else if (ypart == 0)
2901 ypart = operand_subword_force (y, i, mode);
2903 if (xpart == 0 || ypart == 0)
2906 need_clobber |= (GET_CODE (xpart) == SUBREG);
2908 last_insn = emit_move_insn (xpart, ypart);
2914 /* Show the output dies here. This is necessary for SUBREGs
2915 of pseudos since we cannot track their lifetimes correctly;
2916 hard regs shouldn't appear here except as return values.
2917 We never want to emit such a clobber after reload. */
2919 && ! (reload_in_progress || reload_completed)
2920 && need_clobber != 0)
2921 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2931 /* If Y is representable exactly in a narrower mode, and the target can
2932 perform the extension directly from constant or memory, then emit the
2933 move as an extension. */
2936 compress_float_constant (rtx x, rtx y)
2938 enum machine_mode dstmode = GET_MODE (x);
2939 enum machine_mode orig_srcmode = GET_MODE (y);
2940 enum machine_mode srcmode;
2943 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2945 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2946 srcmode != orig_srcmode;
2947 srcmode = GET_MODE_WIDER_MODE (srcmode))
2950 rtx trunc_y, last_insn;
2952 /* Skip if the target can't extend this way. */
2953 ic = can_extend_p (dstmode, srcmode, 0);
2954 if (ic == CODE_FOR_nothing)
2957 /* Skip if the narrowed value isn't exact. */
2958 if (! exact_real_truncate (srcmode, &r))
2961 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2963 if (LEGITIMATE_CONSTANT_P (trunc_y))
2965 /* Skip if the target needs extra instructions to perform the extension.  */
2967 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2970 else if (float_extend_from_mem[dstmode][srcmode])
2971 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2975 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2976 last_insn = get_last_insn ();
2979 set_unique_reg_note (last_insn, REG_EQUAL, y);
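/* Illustrative sketch (not part of the original source): the
   exact_real_truncate test above asks whether Y survives a round trip
   through the narrower mode.  The same idea for double -> float, with
   an invented name:  */

static int
example_exactly_representable_as_float (double y)
{
  float narrowed = (float) y;      /* truncate to the narrow mode */
  return (double) narrowed == y;   /* exact iff the round trip is lossless */
}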
2987 /* Pushing data onto the stack. */
2989 /* Push a block of length SIZE (perhaps variable)
2990 and return an rtx to address the beginning of the block.
2991 The value may be virtual_outgoing_args_rtx.
2993 EXTRA is the number of bytes of padding to push in addition to SIZE.
2994 BELOW nonzero means this padding comes at low addresses;
2995 otherwise, the padding comes at high addresses. */
2998 push_block (rtx size, int extra, int below)
3002 size = convert_modes (Pmode, ptr_mode, size, 1);
3003 if (CONSTANT_P (size))
3004 anti_adjust_stack (plus_constant (size, extra));
3005 else if (REG_P (size) && extra == 0)
3006 anti_adjust_stack (size);
3009 temp = copy_to_mode_reg (Pmode, size);
3011 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3012 temp, 0, OPTAB_LIB_WIDEN);
3013 anti_adjust_stack (temp);
3016 #ifndef STACK_GROWS_DOWNWARD
3022 temp = virtual_outgoing_args_rtx;
3023 if (extra != 0 && below)
3024 temp = plus_constant (temp, extra);
3028 if (GET_CODE (size) == CONST_INT)
3029 temp = plus_constant (virtual_outgoing_args_rtx,
3030 -INTVAL (size) - (below ? 0 : extra));
3031 else if (extra != 0 && !below)
3032 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3033 negate_rtx (Pmode, plus_constant (size, extra)));
3035 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3036 negate_rtx (Pmode, size));
3039 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
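/* Illustrative sketch (not part of the original source): for a
   downward-growing stack, the block just pushed starts SIZE (+ EXTRA)
   bytes below the old outgoing-args pointer, and BELOW decides which
   side of the block the padding lands on, matching the plus_constant
   arithmetic above.  Plain arithmetic, with an invented name:  */

static unsigned long
example_push_block_addr (unsigned long args_ptr, unsigned long size,
			 unsigned long extra, int below)
{
  /* Padding at low addresses (below != 0) leaves the data higher up,
     so only high-address padding shifts the block's start.  */
  return args_ptr - size - (below ? 0 : extra);
}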
3042 #ifdef PUSH_ROUNDING
3044 /* Emit single push insn. */
3047 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3050 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3052 enum insn_code icode;
3053 insn_operand_predicate_fn pred;
3055 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3056 /* If there is a push pattern, use it.  Otherwise try the old way: hand a
3057 MEM representing the push operation to the move expander.  */
3058 icode = push_optab->handlers[(int) mode].insn_code;
3059 if (icode != CODE_FOR_nothing)
3061 if (((pred = insn_data[(int) icode].operand[0].predicate)
3062 && !((*pred) (x, mode))))
3063 x = force_reg (mode, x);
3064 emit_insn (GEN_FCN (icode) (x));
3067 if (GET_MODE_SIZE (mode) == rounded_size)
3068 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3069 /* If we are to pad downward, adjust the stack pointer first and
3070 then store X into the stack location using an offset. This is
3071 because emit_move_insn does not know how to pad; it does not have access to the type.  */
3073 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3075 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3076 HOST_WIDE_INT offset;
3078 emit_move_insn (stack_pointer_rtx,
3079 expand_binop (Pmode,
3080 #ifdef STACK_GROWS_DOWNWARD
3086 GEN_INT (rounded_size),
3087 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3089 offset = (HOST_WIDE_INT) padding_size;
3090 #ifdef STACK_GROWS_DOWNWARD
3091 if (STACK_PUSH_CODE == POST_DEC)
3092 /* We have already decremented the stack pointer, so get the previous value.  */
3094 offset += (HOST_WIDE_INT) rounded_size;
3096 if (STACK_PUSH_CODE == POST_INC)
3097 /* We have already incremented the stack pointer, so get the previous value.  */
3099 offset -= (HOST_WIDE_INT) rounded_size;
3101 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3105 #ifdef STACK_GROWS_DOWNWARD
3106 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3107 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3108 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3110 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3111 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3112 GEN_INT (rounded_size));
3114 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3117 dest = gen_rtx_MEM (mode, dest_addr);
3121 set_mem_attributes (dest, type, 1);
3123 if (flag_optimize_sibling_calls)
3124 /* Function incoming arguments may overlap with sibling call
3125 outgoing arguments and we cannot allow reordering of reads
3126 from function arguments with stores to outgoing arguments
3127 of sibling calls. */
3128 set_mem_alias_set (dest, 0);
3130 emit_move_insn (dest, x);
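/* Illustrative sketch (not part of the original source): with downward
   padding, the stack pointer moves by the rounded size but the value is
   stored at an offset so the padding sits below it; the POST_DEC /
   POST_INC corrections above account for the pointer having already
   moved.  Plain arithmetic for a downward-growing stack, with an
   invented name:  */

static unsigned long
example_padded_store_addr (unsigned long sp, unsigned long size,
			   unsigned long rounded_size)
{
  unsigned long padding = rounded_size - size;

  sp -= rounded_size;        /* the push adjusts by the rounded amount */
  return sp + padding;       /* store the value above the padding bytes */
}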
3134 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3136 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3138 SIZE is an rtx for the size of data to be copied (in bytes),
3139 needed only if X is BLKmode.
3141 ALIGN (in bits) is maximum alignment we can assume.
3143 If PARTIAL and REG are both nonzero, then copy that many of the first
3144 words of X into registers starting with REG, and push the rest of X.
3145 The amount of space pushed is decreased by PARTIAL words,
3146 rounded *down* to a multiple of PARM_BOUNDARY.
3147 REG must be a hard register in this case.
3148 If REG is zero but PARTIAL is not, take all other actions for an
3149 argument partially in registers, but do not actually load any registers.
3152 EXTRA is the amount in bytes of extra space to leave next to this arg.
3153 This is ignored if an argument block has already been allocated.
3155 On a machine that lacks real push insns, ARGS_ADDR is the address of
3156 the bottom of the argument block for this call. We use indexing off there
3157 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3158 argument block has not been preallocated.
3160 ARGS_SO_FAR is the size of args previously pushed for this call.
3162 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3163 for arguments passed in registers. If nonzero, it will be the number
3164 of bytes required. */
3167 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3168 unsigned int align, int partial, rtx reg, int extra,
3169 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3173 enum direction stack_direction
3174 #ifdef STACK_GROWS_DOWNWARD
3180 /* Decide where to pad the argument: `downward' for below,
3181 `upward' for above, or `none' for don't pad it.
3182 Default is below for small data on big-endian machines; else above. */
3183 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3185 /* Invert direction if stack is post-decrement.
3187 if (STACK_PUSH_CODE == POST_DEC)
3188 if (where_pad != none)
3189 where_pad = (where_pad == downward ? upward : downward);
3193 if (mode == BLKmode)
3195 /* Copy a block into the stack, entirely or partially. */
3198 int used = partial * UNITS_PER_WORD;
3202 if (reg && GET_CODE (reg) == PARALLEL)
3204 /* Use the size of the elt to compute offset. */
3205 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3206 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3207 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3210 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3217 /* USED is now the # of bytes we need not copy to the stack
3218 because registers will take care of them. */
3221 xinner = adjust_address (xinner, BLKmode, used);
3223 /* If the partial register-part of the arg counts in its stack size,
3224 skip the part of stack space corresponding to the registers.
3225 Otherwise, start copying to the beginning of the stack space,
3226 by setting SKIP to 0. */
3227 skip = (reg_parm_stack_space == 0) ? 0 : used;
3229 #ifdef PUSH_ROUNDING
3230 /* Do it with several push insns if that doesn't take lots of insns
3231 and if there is no difficulty with push insns that skip bytes
3232 on the stack for alignment purposes. */
3235 && GET_CODE (size) == CONST_INT
3237 && MEM_ALIGN (xinner) >= align
3238 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3239 /* Here we avoid the case of a structure whose weak alignment
3240 forces many pushes of a small amount of data,
3241 and such small pushes do rounding that causes trouble. */
3242 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3243 || align >= BIGGEST_ALIGNMENT
3244 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3245 == (align / BITS_PER_UNIT)))
3246 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3248 /* Push padding now if padding above and stack grows down,
3249 or if padding below and stack grows up.
3250 But if space already allocated, this has already been done. */
3251 if (extra && args_addr == 0
3252 && where_pad != none && where_pad != stack_direction)
3253 anti_adjust_stack (GEN_INT (extra));
3255 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3258 #endif /* PUSH_ROUNDING */
3262 /* Otherwise make space on the stack and copy the data
3263 to the address of that space. */
3265 /* Deduct words put into registers from the size we must copy. */
3268 if (GET_CODE (size) == CONST_INT)
3269 size = GEN_INT (INTVAL (size) - used);
3271 size = expand_binop (GET_MODE (size), sub_optab, size,
3272 GEN_INT (used), NULL_RTX, 0,
3276 /* Get the address of the stack space.
3277 In this case, we do not deal with EXTRA separately.
3278 A single stack adjust will do. */
3281 temp = push_block (size, extra, where_pad == downward);
3284 else if (GET_CODE (args_so_far) == CONST_INT)
3285 temp = memory_address (BLKmode,
3286 plus_constant (args_addr,
3287 skip + INTVAL (args_so_far)));
3289 temp = memory_address (BLKmode,
3290 plus_constant (gen_rtx_PLUS (Pmode,
3295 if (!ACCUMULATE_OUTGOING_ARGS)
3297 /* If the source is referenced relative to the stack pointer,
3298 copy it to another register to stabilize it. We do not need
3299 to do this if we know that we won't be changing sp. */
3301 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3302 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3303 temp = copy_to_reg (temp);
3306 target = gen_rtx_MEM (BLKmode, temp);
3308 /* We do *not* set_mem_attributes here, because incoming arguments
3309 may overlap with sibling call outgoing arguments and we cannot
3310 allow reordering of reads from function arguments with stores
3311 to outgoing arguments of sibling calls. We do, however, want
3312 to record the alignment of the stack slot. */
3313 /* ALIGN may well be better aligned than TYPE, e.g. due to
3314 PARM_BOUNDARY. Assume the caller isn't lying. */
3315 set_mem_align (target, align);
3317 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3320 else if (partial > 0)
3322 /* Scalar partly in registers. */
3324 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3327 /* # words of start of argument
3328 that we must make space for but need not store. */
3329 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3330 int args_offset = INTVAL (args_so_far);
3333 /* Push padding now if padding above and stack grows down,
3334 or if padding below and stack grows up.
3335 But if space already allocated, this has already been done. */
3336 if (extra && args_addr == 0
3337 && where_pad != none && where_pad != stack_direction)
3338 anti_adjust_stack (GEN_INT (extra));
3340 /* If we make space by pushing it, we might as well push
3341 the real data. Otherwise, we can leave OFFSET nonzero
3342 and leave the space uninitialized. */
3346 /* Now NOT_STACK gets the number of words that we don't need to
3347 allocate on the stack. */
3348 not_stack = partial - offset;
3350 /* If the partial register-part of the arg counts in its stack size,
3351 skip the part of stack space corresponding to the registers.
3352 Otherwise, start copying to the beginning of the stack space,
3353 by setting SKIP to 0. */
3354 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3356 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3357 x = validize_mem (force_const_mem (mode, x));
3359 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3360 SUBREGs of such registers are not allowed. */
3361 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3362 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3363 x = copy_to_reg (x);
3365 /* Loop over all the words allocated on the stack for this arg. */
3366 /* We can do it by words, because any scalar bigger than a word
3367 has a size a multiple of a word. */
3368 #ifndef PUSH_ARGS_REVERSED
3369 for (i = not_stack; i < size; i++)
3371 for (i = size - 1; i >= not_stack; i--)
3373 if (i >= not_stack + offset)
3374 emit_push_insn (operand_subword_force (x, i, mode),
3375 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3377 GEN_INT (args_offset + ((i - not_stack + skip)
3379 reg_parm_stack_space, alignment_pad);
3386 /* Push padding now if padding above and stack grows down,
3387 or if padding below and stack grows up.
3388 But if space already allocated, this has already been done. */
3389 if (extra && args_addr == 0
3390 && where_pad != none && where_pad != stack_direction)
3391 anti_adjust_stack (GEN_INT (extra));
3393 #ifdef PUSH_ROUNDING
3394 if (args_addr == 0 && PUSH_ARGS)
3395 emit_single_push_insn (mode, x, type);
3399 if (GET_CODE (args_so_far) == CONST_INT)
3401 = memory_address (mode,
3402 plus_constant (args_addr,
3403 INTVAL (args_so_far)));
3405 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3407 dest = gen_rtx_MEM (mode, addr);
3409 /* We do *not* set_mem_attributes here, because incoming arguments
3410 may overlap with sibling call outgoing arguments and we cannot
3411 allow reordering of reads from function arguments with stores
3412 to outgoing arguments of sibling calls. We do, however, want
3413 to record the alignment of the stack slot. */
3414 /* ALIGN may well be better aligned than TYPE, e.g. due to
3415 PARM_BOUNDARY. Assume the caller isn't lying. */
3416 set_mem_align (dest, align);
3418 emit_move_insn (dest, x);
3422 /* If part should go in registers, copy that part
3423 into the appropriate registers. Do this now, at the end,
3424 since mem-to-mem copies above may do function calls. */
3425 if (partial > 0 && reg != 0)
3427 /* Handle calls that pass values in multiple non-contiguous locations.
3428 The Irix 6 ABI has examples of this. */
3429 if (GET_CODE (reg) == PARALLEL)
3430 emit_group_load (reg, x, type, -1);
3432 move_block_to_reg (REGNO (reg), x, partial, mode);
3435 if (extra && args_addr == 0 && where_pad == stack_direction)
3436 anti_adjust_stack (GEN_INT (extra));
3438 if (alignment_pad && args_addr == 0)
3439 anti_adjust_stack (alignment_pad);
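/* Illustrative sketch (not part of the original source): the PARTIAL
   bookkeeping above splits one argument between registers and the
   stack.  USED bytes are covered by registers; whether the stack copy
   skips over them depends on REG_PARM_STACK_SPACE.  A stand-alone
   analogue of the byte accounting, assuming 4-byte words as a stand-in
   for UNITS_PER_WORD; names are invented:  */

static void
example_partial_split (unsigned long arg_size, int partial_words,
		       int reg_parm_stack_space,
		       unsigned long *copy_bytes, unsigned long *skip_bytes)
{
  unsigned long used = (unsigned long) partial_words * 4;

  *copy_bytes = arg_size - used;                 /* what still hits the stack */
  *skip_bytes = reg_parm_stack_space ? used : 0; /* where the copy starts */
}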
3442 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3446 get_subtarget (rtx x)
3449 /* Only registers can be subtargets. */
3451 /* If the register is readonly, it can't be set more than once. */
3452 || RTX_UNCHANGING_P (x)
3453 /* Don't use hard regs to avoid extending their life. */
3454 || REGNO (x) < FIRST_PSEUDO_REGISTER
3455 /* Avoid subtargets inside loops,
3456 since they hide some invariant expressions. */
3457 || preserve_subexpressions_p ())
3461 /* Expand an assignment that stores the value of FROM into TO.
3462 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3463 (If the value is constant, this rtx is a constant.)
3464 Otherwise, the returned value is NULL_RTX. */
3467 expand_assignment (tree to, tree from, int want_value)
3472 /* Don't crash if the lhs of the assignment was erroneous. */
3474 if (TREE_CODE (to) == ERROR_MARK)
3476 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3477 return want_value ? result : NULL_RTX;
3480 /* Assignment of a structure component needs special treatment
3481 if the structure component's rtx is not simply a MEM.
3482 Assignment of an array element at a constant index, and assignment of
3483 an array element in an unaligned packed structure field, have the same problem.
3486 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3487 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3488 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3490 enum machine_mode mode1;
3491 HOST_WIDE_INT bitsize, bitpos;
3499 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3500 &unsignedp, &volatilep);
3502 /* If we are going to use store_bit_field and extract_bit_field,
3503 make sure to_rtx will be safe for multiple use. */
3505 if (mode1 == VOIDmode && want_value)
3506 tem = stabilize_reference (tem);
3508 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3512 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3514 if (!MEM_P (to_rtx))
3517 #ifdef POINTERS_EXTEND_UNSIGNED
3518 if (GET_MODE (offset_rtx) != Pmode)
3519 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3521 if (GET_MODE (offset_rtx) != ptr_mode)
3522 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3525 /* A constant address in TO_RTX can have VOIDmode, we must not try
3526 to call force_reg for that case. Avoid that case. */
3528 && GET_MODE (to_rtx) == BLKmode
3529 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3531 && (bitpos % bitsize) == 0
3532 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3533 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3535 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3539 to_rtx = offset_address (to_rtx, offset_rtx,
3540 highest_pow2_factor_for_target (to,
3546 /* If the field is at offset zero, we could have been given the
3547 DECL_RTX of the parent struct. Don't munge it. */
3548 to_rtx = shallow_copy_rtx (to_rtx);
3550 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3553 /* Deal with volatile and readonly fields. The former is only done
3554 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3555 if (volatilep && MEM_P (to_rtx))
3557 if (to_rtx == orig_to_rtx)
3558 to_rtx = copy_rtx (to_rtx);
3559 MEM_VOLATILE_P (to_rtx) = 1;
3562 if (TREE_CODE (to) == COMPONENT_REF
3563 && TREE_READONLY (TREE_OPERAND (to, 1))
3564 /* We can't assert that a MEM won't be set more than once
3565 if the component is not addressable because another
3566 non-addressable component may be referenced by the same MEM. */
3567 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3569 if (to_rtx == orig_to_rtx)
3570 to_rtx = copy_rtx (to_rtx);
3571 RTX_UNCHANGING_P (to_rtx) = 1;
3574 if (MEM_P (to_rtx) && ! can_address_p (to))
3576 if (to_rtx == orig_to_rtx)
3577 to_rtx = copy_rtx (to_rtx);
3578 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3581 /* Optimize bitfld op= val in certain cases. */
3582 while (mode1 == VOIDmode && !want_value
3583 && bitsize > 0 && bitsize < BITS_PER_WORD
3584 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3585 && !TREE_SIDE_EFFECTS (to)
3586 && !TREE_THIS_VOLATILE (to))
3589 rtx value, str_rtx = to_rtx;
3590 HOST_WIDE_INT bitpos1 = bitpos;
3595 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3596 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3599 op0 = TREE_OPERAND (src, 0);
3600 op1 = TREE_OPERAND (src, 1);
3603 if (! operand_equal_p (to, op0, 0))
3606 if (MEM_P (str_rtx))
3608 enum machine_mode mode = GET_MODE (str_rtx);
3609 HOST_WIDE_INT offset1;
3611 if (GET_MODE_BITSIZE (mode) == 0
3612 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3614 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3616 if (mode == VOIDmode)
3620 bitpos1 %= GET_MODE_BITSIZE (mode);
3621 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3622 str_rtx = adjust_address (str_rtx, mode, offset1);
3624 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3627 /* If the bit field covers the whole REG/MEM, store_field
3628 will likely generate better code. */
3629 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3632 /* We can't handle fields split across multiple entities.  */
3633 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3636 if (BYTES_BIG_ENDIAN)
3637 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3640 /* Special case some bitfield op= exp. */
3641 switch (TREE_CODE (src))
3645 /* For now, just optimize the case of the topmost bitfield
3646 where we don't need to do any masking and also
3647 1 bit bitfields where xor can be used.
3648 We might win by one instruction for the other bitfields
3649 too if insv/extv instructions aren't used, so that
3650 can be added later. */
3651 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3652 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3654 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3655 value = convert_modes (GET_MODE (str_rtx),
3656 TYPE_MODE (TREE_TYPE (op1)), value,
3657 TYPE_UNSIGNED (TREE_TYPE (op1)));
3659 /* We may be accessing data outside the field, which means
3660 we can alias adjacent data. */
3661 if (MEM_P (str_rtx))
3663 str_rtx = shallow_copy_rtx (str_rtx);
3664 set_mem_alias_set (str_rtx, 0);
3665 set_mem_expr (str_rtx, 0);
3668 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3670 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3672 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3676 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3677 build_int_cst (NULL_TREE, bitpos1, 0),
3679 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3680 value, str_rtx, 1, OPTAB_WIDEN);
3681 if (result != str_rtx)
3682 emit_move_insn (str_rtx, result);
3694 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3696 /* Spurious cast for HPUX compiler. */
3697 ? ((enum machine_mode)
3698 TYPE_MODE (TREE_TYPE (to)))
3700 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3702 preserve_temp_slots (result);
3706 /* If the value is meaningful, convert RESULT to the proper mode.
3707 Otherwise, return nothing. */
3708 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3709 TYPE_MODE (TREE_TYPE (from)),
3711 TYPE_UNSIGNED (TREE_TYPE (to)))
3715 /* If the rhs is a function call and its value is not an aggregate,
3716 call the function before we start to compute the lhs.
3717 This is needed for correct code for cases such as
3718 val = setjmp (buf) on machines where reference to val
3719 requires loading up part of an address in a separate insn.
3721 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3722 since it might be a promoted variable where the zero- or sign- extension
3723 needs to be done. Handling this in the normal way is safe because no
3724 computation is done before the call. */
3725 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3726 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3727 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3728 && REG_P (DECL_RTL (to))))
3733 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3735 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3737 /* Handle calls that return values in multiple non-contiguous locations.
3738 The Irix 6 ABI has examples of this. */
3739 if (GET_CODE (to_rtx) == PARALLEL)
3740 emit_group_load (to_rtx, value, TREE_TYPE (from),
3741 int_size_in_bytes (TREE_TYPE (from)));
3742 else if (GET_MODE (to_rtx) == BLKmode)
3743 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3746 if (POINTER_TYPE_P (TREE_TYPE (to)))
3747 value = convert_memory_address (GET_MODE (to_rtx), value);
3748 emit_move_insn (to_rtx, value);
3750 preserve_temp_slots (to_rtx);
3753 return want_value ? to_rtx : NULL_RTX;
3756 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3757 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3760 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3762 /* Don't move directly into a return register. */
3763 if (TREE_CODE (to) == RESULT_DECL
3764 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3769 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3771 if (GET_CODE (to_rtx) == PARALLEL)
3772 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3773 int_size_in_bytes (TREE_TYPE (from)));
3775 emit_move_insn (to_rtx, temp);
3777 preserve_temp_slots (to_rtx);
3780 return want_value ? to_rtx : NULL_RTX;
3783 /* In case we are returning the contents of an object which overlaps
3784 the place the value is being stored, use a safe function when copying
3785 a value through a pointer into a structure value return block. */
3786 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3787 && current_function_returns_struct
3788 && !current_function_returns_pcc_struct)
3793 size = expr_size (from);
3794 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3796 emit_library_call (memmove_libfunc, LCT_NORMAL,
3797 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3798 XEXP (from_rtx, 0), Pmode,
3799 convert_to_mode (TYPE_MODE (sizetype),
3800 size, TYPE_UNSIGNED (sizetype)),
3801 TYPE_MODE (sizetype));
3803 preserve_temp_slots (to_rtx);
3806 return want_value ? to_rtx : NULL_RTX;
3809 /* Compute FROM and store the value in the rtx we got. */
3812 result = store_expr (from, to_rtx, want_value);
3813 preserve_temp_slots (result);
3816 return want_value ? result : NULL_RTX;
3819 /* Generate code for computing expression EXP,
3820 and storing the value into TARGET.
3822 If WANT_VALUE & 1 is nonzero, return a copy of the value
3823 not in TARGET, so that we can be sure to use the proper
3824 value in a containing expression even if TARGET has something
3825 else stored in it. If possible, we copy the value through a pseudo
3826 and return that pseudo. Or, if the value is constant, we try to
3827 return the constant. In some cases, we return a pseudo
3828 copied *from* TARGET.
3830 If the mode is BLKmode then we may return TARGET itself.
3831 It turns out that in BLKmode it doesn't cause a problem,
3832 because C has no operators that could combine two different
3833 assignments into the same BLKmode object with different values
3834 with no sequence point.  Will other languages need this to be more thorough?
3837 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3838 to catch quickly any cases where the caller uses the value
3839 and fails to set WANT_VALUE.
3841 If WANT_VALUE & 2 is set, this is a store into a call param on the
3842 stack, and block moves may need to be treated specially. */
3845 store_expr (tree exp, rtx target, int want_value)
3848 rtx alt_rtl = NULL_RTX;
3849 int dont_return_target = 0;
3850 int dont_store_target = 0;
3852 if (VOID_TYPE_P (TREE_TYPE (exp)))
3854 /* C++ can generate ?: expressions with a throw expression in one
3855 branch and an rvalue in the other. Here, we resolve attempts to
3856 store the throw expression's nonexistent result. */
3859 expand_expr (exp, const0_rtx, VOIDmode, 0);
3862 if (TREE_CODE (exp) == COMPOUND_EXPR)
3864 /* Perform first part of compound expression, then assign from second
3866 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3867 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3868 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3870 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3872 /* For conditional expression, get safe form of the target. Then
3873 test the condition, doing the appropriate assignment on either
3874 side. This avoids the creation of unnecessary temporaries.
3875 For non-BLKmode, it is more efficient not to do this. */
3877 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3879 do_pending_stack_adjust ();
3881 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3882 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
3883 emit_jump_insn (gen_jump (lab2));
3886 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
3890 return want_value & 1 ? target : NULL_RTX;
3892 else if ((want_value & 1) != 0
3894 && ! MEM_VOLATILE_P (target)
3895 && GET_MODE (target) != BLKmode)
3896 /* If target is in memory and caller wants value in a register instead,
3897 arrange that. Pass TARGET as target for expand_expr so that,
3898 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3899 We know expand_expr will not use the target in that case.
3900 Don't do this if TARGET is volatile because we are supposed
3901 to write it and then read it. */
3903 temp = expand_expr (exp, target, GET_MODE (target),
3904 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3905 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3907 /* If TEMP is already in the desired TARGET, only copy it from
3908 memory and don't store it there again. */
3910 || (rtx_equal_p (temp, target)
3911 && ! side_effects_p (temp) && ! side_effects_p (target)))
3912 dont_store_target = 1;
3913 temp = copy_to_reg (temp);
3915 dont_return_target = 1;
3917 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3918 /* If this is a scalar in a register that is stored in a wider mode
3919 than the declared mode, compute the result into its declared mode
3920 and then convert to the wider mode. Our value is the computed
3923 rtx inner_target = 0;
3925 /* If we don't want a value, we can do the conversion inside EXP,
3926 which will often result in some optimizations. Do the conversion
3927 in two steps: first change the signedness, if needed, then
3928 the extend. But don't do this if the type of EXP is a subtype
3929 of something else since then the conversion might involve
3930 more than just converting modes. */
3931 if ((want_value & 1) == 0
3932 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3933 && TREE_TYPE (TREE_TYPE (exp)) == 0
3934 && (!lang_hooks.reduce_bit_field_operations
3935 || (GET_MODE_PRECISION (GET_MODE (target))
3936 == TYPE_PRECISION (TREE_TYPE (exp)))))
3938 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3939 != SUBREG_PROMOTED_UNSIGNED_P (target))
3941 (lang_hooks.types.signed_or_unsigned_type
3942 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3944 exp = convert (lang_hooks.types.type_for_mode
3945 (GET_MODE (SUBREG_REG (target)),
3946 SUBREG_PROMOTED_UNSIGNED_P (target)),
3949 inner_target = SUBREG_REG (target);
3952 temp = expand_expr (exp, inner_target, VOIDmode,
3953 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3955 /* If TEMP is a MEM and we want a result value, make the access
3956 now so it gets done only once. Strictly speaking, this is
3957 only necessary if the MEM is volatile, or if the address
3958 overlaps TARGET. But not performing the load twice also
3959 reduces the amount of rtl we generate and then have to CSE. */
3960 if (MEM_P (temp) && (want_value & 1) != 0)
3961 temp = copy_to_reg (temp);
3963 /* If TEMP is a VOIDmode constant, use convert_modes to make
3964 sure that we properly convert it. */
3965 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3967 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3968 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3969 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3970 GET_MODE (target), temp,
3971 SUBREG_PROMOTED_UNSIGNED_P (target));
3974 convert_move (SUBREG_REG (target), temp,
3975 SUBREG_PROMOTED_UNSIGNED_P (target));
3977 /* If we promoted a constant, change the mode back down to match
3978 target. Otherwise, the caller might get confused by a result whose
3979 mode is larger than expected. */
3981 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3983 if (GET_MODE (temp) != VOIDmode)
3985 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
3986 SUBREG_PROMOTED_VAR_P (temp) = 1;
3987 SUBREG_PROMOTED_UNSIGNED_SET (temp,
3988 SUBREG_PROMOTED_UNSIGNED_P (target));
3991 temp = convert_modes (GET_MODE (target),
3992 GET_MODE (SUBREG_REG (target)),
3993 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3996 return want_value & 1 ? temp : NULL_RTX;
4000 temp = expand_expr_real (exp, target, GET_MODE (target),
4002 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4004 /* Return TARGET if it's a specified hardware register.
4005 If TARGET is a volatile mem ref, either return TARGET
4006 or return a reg copied *from* TARGET; ANSI requires this.
4008 Otherwise, if TEMP is not TARGET, return TEMP
4009 if it is constant (for efficiency),
4010 or if we really want the correct value. */
4011 if (!(target && REG_P (target)
4012 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4013 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4014 && ! rtx_equal_p (temp, target)
4015 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4016 dont_return_target = 1;
4019 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4020 the same as that of TARGET, adjust the constant. This is needed, for
4021 example, in case it is a CONST_DOUBLE and we want only a word-sized
4023 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4024 && TREE_CODE (exp) != ERROR_MARK
4025 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4026 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4027 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4029 /* If value was not generated in the target, store it there.
4030 Convert the value to TARGET's type first if necessary and emit the
4031 pending incrementations that have been queued when expanding EXP.
4032 Note that we cannot emit the whole queue blindly because this will
4033 effectively disable the POST_INC optimization later.
4035 If TEMP and TARGET compare equal according to rtx_equal_p, but
4036 one or both of them are volatile memory refs, we have to distinguish
4038 - expand_expr has used TARGET. In this case, we must not generate
4039 another copy. This can be detected by TARGET being equal according
4041 - expand_expr has not used TARGET - that means that the source just
4042 happens to have the same RTX form. Since temp will have been created
4043 by expand_expr, it will compare unequal according to == .
4044 We must generate a copy in this case, to reach the correct number
4045 of volatile memory references. */
4047 if ((! rtx_equal_p (temp, target)
4048 || (temp != target && (side_effects_p (temp)
4049 || side_effects_p (target))))
4050 && TREE_CODE (exp) != ERROR_MARK
4051 && ! dont_store_target
4052 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4053 but TARGET is not valid memory reference, TEMP will differ
4054 from TARGET although it is really the same location. */
4055 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4056 /* If there's nothing to copy, don't bother. Don't call expr_size
4057 unless necessary, because some front-ends (C++) expr_size-hook
4058 aborts on objects that are not supposed to be bit-copied or
4060 && expr_size (exp) != const0_rtx)
4062 if (GET_MODE (temp) != GET_MODE (target)
4063 && GET_MODE (temp) != VOIDmode)
4065 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4066 if (dont_return_target)
4068 /* In this case, we will return TEMP,
4069 so make sure it has the proper mode.
4070 But don't forget to store the value into TARGET. */
4071 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4072 emit_move_insn (target, temp);
4075 convert_move (target, temp, unsignedp);
4078 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4080 /* Handle copying a string constant into an array. The string
4081 constant may be shorter than the array. So copy just the string's
4082 actual length, and clear the rest. First get the size of the data
4083 type of the string, which is actually the size of the target. */
4084 rtx size = expr_size (exp);
4086 if (GET_CODE (size) == CONST_INT
4087 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4088 emit_block_move (target, temp, size,
4090 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4093 /* Compute the size of the data to copy from the string. */
4095 = size_binop (MIN_EXPR,
4096 make_tree (sizetype, size),
4097 size_int (TREE_STRING_LENGTH (exp)));
4099 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4101 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4104 /* Copy that much. */
4105 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4106 TYPE_UNSIGNED (sizetype));
4107 emit_block_move (target, temp, copy_size_rtx,
4109 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4111 /* Figure out how much is left in TARGET that we have to clear.
4112 Do all calculations in ptr_mode. */
4113 if (GET_CODE (copy_size_rtx) == CONST_INT)
4115 size = plus_constant (size, -INTVAL (copy_size_rtx));
4116 target = adjust_address (target, BLKmode,
4117 INTVAL (copy_size_rtx));
4121 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4122 copy_size_rtx, NULL_RTX, 0,
4125 #ifdef POINTERS_EXTEND_UNSIGNED
4126 if (GET_MODE (copy_size_rtx) != Pmode)
	      copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
					       TYPE_UNSIGNED (sizetype));
#endif
4131 target = offset_address (target, copy_size_rtx,
4132 highest_pow2_factor (copy_size));
4133 label = gen_label_rtx ();
4134 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4135 GET_MODE (size), 0, label);
4138 if (size != const0_rtx)
4139 clear_storage (target, size);
4145 /* Handle calls that return values in multiple non-contiguous locations.
4146 The Irix 6 ABI has examples of this. */
4147 else if (GET_CODE (target) == PARALLEL)
4148 emit_group_load (target, temp, TREE_TYPE (exp),
4149 int_size_in_bytes (TREE_TYPE (exp)));
4150 else if (GET_MODE (temp) == BLKmode)
4151 emit_block_move (target, temp, expr_size (exp),
4153 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4156 temp = force_operand (temp, target);
4158 emit_move_insn (target, temp);
4162 /* If we don't want a value, return NULL_RTX. */
4163 if ((want_value & 1) == 0)
4166 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4167 ??? The latter test doesn't seem to make sense. */
4168 else if (dont_return_target && !MEM_P (temp))
4171 /* Return TARGET itself if it is a hard register. */
4172 else if ((want_value & 1) != 0
4173 && GET_MODE (target) != BLKmode
4174 && ! (REG_P (target)
4175 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4176 return copy_to_reg (target);
/* Examine CTOR.  Discover how many scalar fields are set to nonzero
   values and place that count in *P_NZ_ELTS.  Discover how many scalar
   fields are set to non-constant values and place that count in
   *P_NC_ELTS.  */
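/* For example, a hypothetical C initializer such as

	struct s { int a, b, c; } x = { 1, 0, foo () };

   sets two scalar fields to nonzero values (the 1 and the call) and one
   field to a non-constant value (the call), so it would yield
   *P_NZ_ELTS == 2 and *P_NC_ELTS == 1.  */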
4187 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4188 HOST_WIDE_INT *p_nc_elts)
4190 HOST_WIDE_INT nz_elts, nc_elts;
4196 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4198 tree value = TREE_VALUE (list);
4199 tree purpose = TREE_PURPOSE (list);
4203 if (TREE_CODE (purpose) == RANGE_EXPR)
4205 tree lo_index = TREE_OPERAND (purpose, 0);
4206 tree hi_index = TREE_OPERAND (purpose, 1);
4208 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4209 mult = (tree_low_cst (hi_index, 1)
4210 - tree_low_cst (lo_index, 1) + 1);
4213 switch (TREE_CODE (value))
4217 HOST_WIDE_INT nz = 0, nc = 0;
4218 categorize_ctor_elements_1 (value, &nz, &nc);
4219 nz_elts += mult * nz;
4220 nc_elts += mult * nc;
4226 if (!initializer_zerop (value))
4230 if (!initializer_zerop (TREE_REALPART (value)))
4232 if (!initializer_zerop (TREE_IMAGPART (value)))
4238 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4239 if (!initializer_zerop (TREE_VALUE (v)))
4246 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4252 *p_nz_elts += nz_elts;
4253 *p_nc_elts += nc_elts;
4257 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4258 HOST_WIDE_INT *p_nc_elts)
4262 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
/* Count the number of scalars in TYPE.  Return -1 on overflow or
   if TYPE is variable-sized.  */
4269 count_type_elements (tree type)
  const HOST_WIDE_INT max = ~((HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT - 1));
4272 switch (TREE_CODE (type))
4276 tree telts = array_type_nelts (type);
4277 if (telts && host_integerp (telts, 1))
4279 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4280 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4283 else if (max / n > m)
4291 HOST_WIDE_INT n = 0, t;
4294 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4295 if (TREE_CODE (f) == FIELD_DECL)
4297 t = count_type_elements (TREE_TYPE (f));
4307 case QUAL_UNION_TYPE:
4309 /* Ho hum. How in the world do we guess here? Clearly it isn't
4310 right to count the fields. Guess based on the number of words. */
4311 HOST_WIDE_INT n = int_size_in_bytes (type);
4314 return n / UNITS_PER_WORD;
4321 return TYPE_VECTOR_SUBPARTS (type);
4330 case REFERENCE_TYPE:
/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
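/* For instance, given a hypothetical C initializer

	int a[8] = { 5 };

   count_type_elements reports 8 scalars and categorize_ctor_elements
   finds a single nonzero element, so 1 < 8 / 4 holds and the
   constructor counts as mostly zeros; "{ 5, 6, 7 }" would not, since
   3 < 8 / 4 fails.  */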
4347 mostly_zeros_p (tree exp)
4349 if (TREE_CODE (exp) == CONSTRUCTOR)
4352 HOST_WIDE_INT nz_elts, nc_elts, elts;
4354 /* If there are no ranges of true bits, it is all zero. */
4355 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4356 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4358 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4359 elts = count_type_elements (TREE_TYPE (exp));
4361 return nz_elts < elts / 4;
4364 return initializer_zerop (exp);
4367 /* Helper function for store_constructor.
4368 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4369 TYPE is the type of the CONSTRUCTOR, not the element type.
4370 CLEARED is as for store_constructor.
4371 ALIAS_SET is the alias set to use for any stores.
4373 This provides a recursive shortcut back to store_constructor when it isn't
4374 necessary to go through store_field. This is so that we can pass through
4375 the cleared field to let store_constructor know that we may not have to
4376 clear a substructure if the outer structure has already been cleared. */
4379 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4380 HOST_WIDE_INT bitpos, enum machine_mode mode,
4381 tree exp, tree type, int cleared, int alias_set)
4383 if (TREE_CODE (exp) == CONSTRUCTOR
4384 /* We can only call store_constructor recursively if the size and
4385 bit position are on a byte boundary. */
4386 && bitpos % BITS_PER_UNIT == 0
4387 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4388 /* If we have a nonzero bitpos for a register target, then we just
4389 let store_field do the bitfield handling. This is unlikely to
4390 generate unnecessary clear instructions anyways. */
4391 && (bitpos == 0 || MEM_P (target)))
4395 = adjust_address (target,
4396 GET_MODE (target) == BLKmode
4398 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4399 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4402 /* Update the alias set, if required. */
4403 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4404 && MEM_ALIAS_SET (target) != 0)
4406 target = copy_rtx (target);
4407 set_mem_alias_set (target, alias_set);
4410 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4413 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4417 /* Store the value of constructor EXP into the rtx TARGET.
4418 TARGET is either a REG or a MEM; we know it cannot conflict, since
4419 safe_from_p has been called.
4420 CLEARED is true if TARGET is known to have been zero'd.
4421 SIZE is the number of bytes of TARGET we are allowed to modify: this
4422 may not be the same as the size of EXP if we are assigning to a field
4423 which has been packed to exclude padding bits. */
4426 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4428 tree type = TREE_TYPE (exp);
4429 #ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif
4433 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4434 || TREE_CODE (type) == QUAL_UNION_TYPE)
4438 /* If size is zero or the target is already cleared, do nothing. */
4439 if (size == 0 || cleared)
4441 /* We either clear the aggregate or indicate the value is dead. */
4442 else if ((TREE_CODE (type) == UNION_TYPE
4443 || TREE_CODE (type) == QUAL_UNION_TYPE)
4444 && ! CONSTRUCTOR_ELTS (exp))
4445 /* If the constructor is empty, clear the union. */
4447 clear_storage (target, expr_size (exp));
4451 /* If we are building a static constructor into a register,
4452 set the initial value as zero so we can fold the value into
4453 a constant. But if more than one register is involved,
4454 this probably loses. */
4455 else if (REG_P (target) && TREE_STATIC (exp)
4456 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4458 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4462 /* If the constructor has fewer fields than the structure
4463 or if we are initializing the structure to mostly zeros,
4464 clear the whole structure first. Don't do this if TARGET is a
4465 register whose mode size isn't equal to SIZE since clear_storage
4466 can't handle this case. */
4468 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4469 || mostly_zeros_p (exp))
4471 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4474 rtx xtarget = target;
4476 if (readonly_fields_p (type))
4478 xtarget = copy_rtx (xtarget);
4479 RTX_UNCHANGING_P (xtarget) = 1;
4482 clear_storage (xtarget, GEN_INT (size));
4487 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4489 /* Store each element of the constructor into
4490 the corresponding field of TARGET. */
4492 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4494 tree field = TREE_PURPOSE (elt);
4495 tree value = TREE_VALUE (elt);
4496 enum machine_mode mode;
4497 HOST_WIDE_INT bitsize;
4498 HOST_WIDE_INT bitpos = 0;
4500 rtx to_rtx = target;
4502 /* Just ignore missing fields.
4503 We cleared the whole structure, above,
4504 if any fields are missing. */
4508 if (cleared && initializer_zerop (value))
4511 if (host_integerp (DECL_SIZE (field), 1))
4512 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4516 mode = DECL_MODE (field);
4517 if (DECL_BIT_FIELD (field))
4520 offset = DECL_FIELD_OFFSET (field);
4521 if (host_integerp (offset, 0)
4522 && host_integerp (bit_position (field), 0))
4524 bitpos = int_bit_position (field);
4528 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4535 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4536 make_tree (TREE_TYPE (exp),
4539 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4540 if (!MEM_P (to_rtx))
4543 #ifdef POINTERS_EXTEND_UNSIGNED
4544 if (GET_MODE (offset_rtx) != Pmode)
4545 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	      if (GET_MODE (offset_rtx) != ptr_mode)
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
4551 to_rtx = offset_address (to_rtx, offset_rtx,
4552 highest_pow2_factor (offset));
4555 if (TREE_READONLY (field))
4558 to_rtx = copy_rtx (to_rtx);
4560 RTX_UNCHANGING_P (to_rtx) = 1;
4563 #ifdef WORD_REGISTER_OPERATIONS
4564 /* If this initializes a field that is smaller than a word, at the
4565 start of a word, try to widen it to a full word.
4566 This special case allows us to output C++ member function
4567 initializations in a form that the optimizers can understand. */
4569 && bitsize < BITS_PER_WORD
4570 && bitpos % BITS_PER_WORD == 0
4571 && GET_MODE_CLASS (mode) == MODE_INT
4572 && TREE_CODE (value) == INTEGER_CST
4574 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4576 tree type = TREE_TYPE (value);
4578 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4580 type = lang_hooks.types.type_for_size
4581 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4582 value = convert (type, value);
4585 if (BYTES_BIG_ENDIAN)
4587 = fold (build2 (LSHIFT_EXPR, type, value,
4588 build_int_cst (NULL_TREE,
4589 BITS_PER_WORD - bitsize, 0)));
4590 bitsize = BITS_PER_WORD;
4595 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4596 && DECL_NONADDRESSABLE_P (field))
4598 to_rtx = copy_rtx (to_rtx);
4599 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4602 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4603 value, type, cleared,
4604 get_alias_set (TREE_TYPE (field)));
4608 else if (TREE_CODE (type) == ARRAY_TYPE)
4614 tree elttype = TREE_TYPE (type);
4616 HOST_WIDE_INT minelt = 0;
4617 HOST_WIDE_INT maxelt = 0;
4619 domain = TYPE_DOMAIN (type);
4620 const_bounds_p = (TYPE_MIN_VALUE (domain)
4621 && TYPE_MAX_VALUE (domain)
4622 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4623 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4625 /* If we have constant bounds for the range of the type, get them. */
4628 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4629 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4632 /* If the constructor has fewer elements than the array,
4633 clear the whole array first. Similarly if this is
	 a static constructor of a non-BLKmode object.  */
4637 else if (REG_P (target) && TREE_STATIC (exp))
4641 HOST_WIDE_INT count = 0, zero_count = 0;
4642 need_to_clear = ! const_bounds_p;
4644 /* This loop is a more accurate version of the loop in
4645 mostly_zeros_p (it handles RANGE_EXPR in an index).
4646 It is also needed to check for missing elements. */
4647 for (elt = CONSTRUCTOR_ELTS (exp);
4648 elt != NULL_TREE && ! need_to_clear;
4649 elt = TREE_CHAIN (elt))
4651 tree index = TREE_PURPOSE (elt);
4652 HOST_WIDE_INT this_node_count;
4654 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4656 tree lo_index = TREE_OPERAND (index, 0);
4657 tree hi_index = TREE_OPERAND (index, 1);
4659 if (! host_integerp (lo_index, 1)
4660 || ! host_integerp (hi_index, 1))
4666 this_node_count = (tree_low_cst (hi_index, 1)
4667 - tree_low_cst (lo_index, 1) + 1);
4670 this_node_count = 1;
4672 count += this_node_count;
4673 if (mostly_zeros_p (TREE_VALUE (elt)))
4674 zero_count += this_node_count;
4677 /* Clear the entire array first if there are any missing elements,
4678 or if the incidence of zero elements is >= 75%. */
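	  /* Worked instance of the heuristic: for a 10-element array whose
	     constructor supplies all ten elements, eight of them zero,
	     4 * 8 >= 3 * 10 holds, so the whole array is cleared once and
	     only the two nonzero elements are stored.  */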
4680 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4684 if (need_to_clear && size > 0)
4687 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4689 clear_storage (target, GEN_INT (size));
4693 if (!cleared && REG_P (target))
4694 /* Inform later passes that the old value is dead. */
4695 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4697 /* Store each element of the constructor into
4698 the corresponding element of TARGET, determined
4699 by counting the elements. */
4700 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4702 elt = TREE_CHAIN (elt), i++)
4704 enum machine_mode mode;
4705 HOST_WIDE_INT bitsize;
4706 HOST_WIDE_INT bitpos;
4708 tree value = TREE_VALUE (elt);
4709 tree index = TREE_PURPOSE (elt);
4710 rtx xtarget = target;
4712 if (cleared && initializer_zerop (value))
4715 unsignedp = TYPE_UNSIGNED (elttype);
4716 mode = TYPE_MODE (elttype);
4717 if (mode == BLKmode)
4718 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4719 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4722 bitsize = GET_MODE_BITSIZE (mode);
4724 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4726 tree lo_index = TREE_OPERAND (index, 0);
4727 tree hi_index = TREE_OPERAND (index, 1);
4728 rtx index_r, pos_rtx;
4729 HOST_WIDE_INT lo, hi, count;
4732 /* If the range is constant and "small", unroll the loop. */
4734 && host_integerp (lo_index, 0)
4735 && host_integerp (hi_index, 0)
4736 && (lo = tree_low_cst (lo_index, 0),
4737 hi = tree_low_cst (hi_index, 0),
4738 count = hi - lo + 1,
4741 || (host_integerp (TYPE_SIZE (elttype), 1)
4742 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
		lo -= minelt;
		hi -= minelt;
4746 for (; lo <= hi; lo++)
4748 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4751 && !MEM_KEEP_ALIAS_SET_P (target)
4752 && TREE_CODE (type) == ARRAY_TYPE
4753 && TYPE_NONALIASED_COMPONENT (type))
4755 target = copy_rtx (target);
4756 MEM_KEEP_ALIAS_SET_P (target) = 1;
4759 store_constructor_field
4760 (target, bitsize, bitpos, mode, value, type, cleared,
4761 get_alias_set (elttype));
4766 rtx loop_start = gen_label_rtx ();
4767 rtx loop_end = gen_label_rtx ();
4770 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4771 unsignedp = TYPE_UNSIGNED (domain);
4773 index = build_decl (VAR_DECL, NULL_TREE, domain);
4776 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4778 SET_DECL_RTL (index, index_r);
4779 store_expr (lo_index, index_r, 0);
4781 /* Build the head of the loop. */
4782 do_pending_stack_adjust ();
4783 emit_label (loop_start);
4785 /* Assign value to element index. */
4787 = convert (ssizetype,
4788 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4789 index, TYPE_MIN_VALUE (domain))));
4790 position = size_binop (MULT_EXPR, position,
4792 TYPE_SIZE_UNIT (elttype)));
4794 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4795 xtarget = offset_address (target, pos_rtx,
4796 highest_pow2_factor (position));
4797 xtarget = adjust_address (xtarget, mode, 0);
4798 if (TREE_CODE (value) == CONSTRUCTOR)
4799 store_constructor (value, xtarget, cleared,
4800 bitsize / BITS_PER_UNIT);
4802 store_expr (value, xtarget, 0);
4804 /* Generate a conditional jump to exit the loop. */
4805 exit_cond = build2 (LT_EXPR, integer_type_node,
4807 jumpif (exit_cond, loop_end);
	      /* Update the loop counter, and jump to the head of
		 the loop.  */
4811 expand_assignment (index,
4812 build2 (PLUS_EXPR, TREE_TYPE (index),
4813 index, integer_one_node), 0);
4815 emit_jump (loop_start);
4817 /* Build the end of the loop. */
4818 emit_label (loop_end);
4821 else if ((index != 0 && ! host_integerp (index, 0))
4822 || ! host_integerp (TYPE_SIZE (elttype), 1))
4827 index = ssize_int (1);
4830 index = fold_convert (ssizetype,
4831 fold (build2 (MINUS_EXPR,
4834 TYPE_MIN_VALUE (domain))));
4836 position = size_binop (MULT_EXPR, index,
4838 TYPE_SIZE_UNIT (elttype)));
4839 xtarget = offset_address (target,
4840 expand_expr (position, 0, VOIDmode, 0),
4841 highest_pow2_factor (position));
4842 xtarget = adjust_address (xtarget, mode, 0);
4843 store_expr (value, xtarget, 0);
4848 bitpos = ((tree_low_cst (index, 0) - minelt)
4849 * tree_low_cst (TYPE_SIZE (elttype), 1));
4851 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4853 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4854 && TREE_CODE (type) == ARRAY_TYPE
4855 && TYPE_NONALIASED_COMPONENT (type))
4857 target = copy_rtx (target);
4858 MEM_KEEP_ALIAS_SET_P (target) = 1;
4860 store_constructor_field (target, bitsize, bitpos, mode, value,
4861 type, cleared, get_alias_set (elttype));
4866 else if (TREE_CODE (type) == VECTOR_TYPE)
4872 tree elttype = TREE_TYPE (type);
4873 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4874 enum machine_mode eltmode = TYPE_MODE (elttype);
4875 HOST_WIDE_INT bitsize;
4876 HOST_WIDE_INT bitpos;
4880 if (eltmode == BLKmode)
4883 n_elts = TYPE_VECTOR_SUBPARTS (type);
4884 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4886 enum machine_mode mode = GET_MODE (target);
4888 icode = (int) vec_init_optab->handlers[mode].insn_code;
4889 if (icode != CODE_FOR_nothing)
		/* Allocate one rtx per vector element.  */
		vector = alloca (n_elts * sizeof (rtx));
4894 for (i = 0; i < n_elts; i++)
4895 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4899 /* If the constructor has fewer elements than the vector,
	 clear the whole vector first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
4904 else if (REG_P (target) && TREE_STATIC (exp))
4908 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4910 for (elt = CONSTRUCTOR_ELTS (exp);
4912 elt = TREE_CHAIN (elt))
4916 int_const_binop (TRUNC_DIV_EXPR,
4917 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4918 TYPE_SIZE (elttype), 0), 1);
4920 count += n_elts_here;
4921 if (mostly_zeros_p (TREE_VALUE (elt)))
4922 zero_count += n_elts_here;
4925 /* Clear the entire vector first if there are any missing elements,
4926 or if the incidence of zero elements is >= 75%. */
4927 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4930 if (need_to_clear && size > 0 && !vector)
4933 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4935 clear_storage (target, GEN_INT (size));
4939 if (!cleared && REG_P (target))
4940 /* Inform later passes that the old value is dead. */
4941 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4943 /* Store each element of the constructor into the corresponding
4944 element of TARGET, determined by counting the elements. */
4945 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4947 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4949 tree value = TREE_VALUE (elt);
4950 tree index = TREE_PURPOSE (elt);
4951 HOST_WIDE_INT eltpos;
4953 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4954 if (cleared && initializer_zerop (value))
4958 eltpos = tree_low_cst (index, 1);
4964 /* Vector CONSTRUCTORs should only be built from smaller
4965 vectors in the case of BLKmode vectors. */
4966 if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
4968 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4972 enum machine_mode value_mode =
4973 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4974 ? TYPE_MODE (TREE_TYPE (value))
4976 bitpos = eltpos * elt_size;
4977 store_constructor_field (target, bitsize, bitpos, value_mode, value,
4978 type, cleared, get_alias_set (elttype));
4983 emit_insn (GEN_FCN (icode) (target,
4984 gen_rtx_PARALLEL (GET_MODE (target),
4985 gen_rtvec_v (n_elts, vector))));
4988 /* Set constructor assignments. */
4989 else if (TREE_CODE (type) == SET_TYPE)
4991 tree elt = CONSTRUCTOR_ELTS (exp);
4992 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4993 tree domain = TYPE_DOMAIN (type);
4994 tree domain_min, domain_max, bitlength;
4996 /* The default implementation strategy is to extract the constant
4997 parts of the constructor, use that to initialize the target,
4998 and then "or" in whatever non-constant ranges we need in addition.
5000 If a large set is all zero or all ones, it is
5001 probably better to set it using memset.
5002 Also, if a large set has just a single range, it may also be
	better to first clear the whole set (using memset), and then
	set the bits we want.  */
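     /* For instance, a hypothetical Pascal-style "set of 0..63"
	initialized to the constant ranges [1..3, 10] has its constant
	bits assembled into words and copied below, whereas a constructor
	dominated by a single large non-constant range is cleared with
	memset and then filled in by the setbits library call further
	down.  */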
5006 /* Check for all zeros. */
5007 if (elt == NULL_TREE && size > 0)
5010 clear_storage (target, GEN_INT (size));
5014 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5015 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5016 bitlength = size_binop (PLUS_EXPR,
5017 size_diffop (domain_max, domain_min),
5020 nbits = tree_low_cst (bitlength, 1);
5022 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5023 are "complicated" (more than one range), initialize (the
5024 constant parts) by copying from a constant. */
5025 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5026 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5028 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5029 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5030 char *bit_buffer = alloca (nbits);
5031 HOST_WIDE_INT word = 0;
5032 unsigned int bit_pos = 0;
5033 unsigned int ibit = 0;
5034 unsigned int offset = 0; /* In bytes from beginning of set. */
5036 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5039 if (bit_buffer[ibit])
5041 if (BYTES_BIG_ENDIAN)
5042 word |= (1 << (set_word_size - 1 - bit_pos));
5044 word |= 1 << bit_pos;
5048 if (bit_pos >= set_word_size || ibit == nbits)
5050 if (word != 0 || ! cleared)
5052 rtx datum = gen_int_mode (word, mode);
5055 /* The assumption here is that it is safe to use
5056 XEXP if the set is multi-word, but not if
5057 it's single-word. */
5059 to_rtx = adjust_address (target, mode, offset);
5060 else if (offset == 0)
5064 emit_move_insn (to_rtx, datum);
5071 offset += set_word_size / BITS_PER_UNIT;
5076 /* Don't bother clearing storage if the set is all ones. */
5077 if (TREE_CHAIN (elt) != NULL_TREE
5078 || (TREE_PURPOSE (elt) == NULL_TREE
5080 : ( ! host_integerp (TREE_VALUE (elt), 0)
5081 || ! host_integerp (TREE_PURPOSE (elt), 0)
5082 || (tree_low_cst (TREE_VALUE (elt), 0)
5083 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5084 != (HOST_WIDE_INT) nbits))))
5085 clear_storage (target, expr_size (exp));
5087 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5089 /* Start of range of element or NULL. */
5090 tree startbit = TREE_PURPOSE (elt);
5091 /* End of range of element, or element value. */
5092 tree endbit = TREE_VALUE (elt);
5093 HOST_WIDE_INT startb, endb;
5094 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5096 bitlength_rtx = expand_expr (bitlength,
5097 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5099 /* Handle non-range tuple element like [ expr ]. */
5100 if (startbit == NULL_TREE)
5102 startbit = save_expr (endbit);
5106 startbit = convert (sizetype, startbit);
5107 endbit = convert (sizetype, endbit);
5108 if (! integer_zerop (domain_min))
5110 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5111 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5113 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5114 EXPAND_CONST_ADDRESS);
5115 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5116 EXPAND_CONST_ADDRESS);
5122 ((build_qualified_type (lang_hooks.types.type_for_mode
5123 (GET_MODE (target), 0),
5126 emit_move_insn (targetx, target);
5129 else if (MEM_P (target))
5134 /* Optimization: If startbit and endbit are constants divisible
5135 by BITS_PER_UNIT, call memset instead. */
5136 if (TREE_CODE (startbit) == INTEGER_CST
5137 && TREE_CODE (endbit) == INTEGER_CST
5138 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5139 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5141 emit_library_call (memset_libfunc, LCT_NORMAL,
5143 plus_constant (XEXP (targetx, 0),
5144 startb / BITS_PER_UNIT),
5146 constm1_rtx, TYPE_MODE (integer_type_node),
5147 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5148 TYPE_MODE (sizetype));
5151 emit_library_call (setbits_libfunc, LCT_NORMAL,
5152 VOIDmode, 4, XEXP (targetx, 0),
5153 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5154 startbit_rtx, TYPE_MODE (sizetype),
5155 endbit_rtx, TYPE_MODE (sizetype));
5158 emit_move_insn (target, targetx);
5166 /* Store the value of EXP (an expression tree)
5167 into a subfield of TARGET which has mode MODE and occupies
5168 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5169 If MODE is VOIDmode, it means that we are storing into a bit-field.
5171 If VALUE_MODE is VOIDmode, return nothing in particular.
5172 UNSIGNEDP is not used in this case.
5174 Otherwise, return an rtx for the value stored. This rtx
5175 has mode VALUE_MODE if that is convenient to do.
5176 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5178 TYPE is the type of the underlying object,
5180 ALIAS_SET is the alias set for the destination. This value will
5181 (in general) be different from that for TARGET, since TARGET is a
5182 reference to the containing structure. */
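/* For example, an assignment to the field B of a hypothetical

	struct s { int a : 3; int b : 5; } *p;

   would, on a typical little-endian layout, reach store_field with
   BITSIZE 5, BITPOS 3 and MODE VOIDmode, and the store would be
   performed by store_bit_field below.  */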
5185 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5186 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5187 int unsignedp, tree type, int alias_set)
5189 HOST_WIDE_INT width_mask = 0;
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
5197 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5198 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5199 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5201 /* If we are storing into an unaligned field of an aligned union that is
5202 in a register, we may have the mode of TARGET being an integer mode but
5203 MODE == BLKmode. In that case, get an aligned object whose size and
5204 alignment are the same as TARGET and store TARGET into it (we can avoid
5205 the store if the field being stored is the entire width of TARGET). Then
5206 call ourselves recursively to store the field into a BLKmode version of
5207 that object. Finally, load from the object into TARGET. This is not
5208 very efficient in general, but should only be slightly more expensive
5209 than the otherwise-required unaligned accesses. Perhaps this can be
5210 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5211 twice, once with emit_move_insn and once via store_field. */
5214 && (REG_P (target) || GET_CODE (target) == SUBREG))
5216 rtx object = assign_temp (type, 0, 1, 1);
5217 rtx blk_object = adjust_address (object, BLKmode, 0);
5219 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5220 emit_move_insn (object, target);
5222 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5225 emit_move_insn (target, object);
5227 /* We want to return the BLKmode version of the data. */
5231 if (GET_CODE (target) == CONCAT)
5233 /* We're storing into a struct containing a single __complex. */
5237 return store_expr (exp, target, value_mode != VOIDmode);
5240 /* If the structure is in a register or if the component
5241 is a bit field, we cannot use addressing to access it.
5242 Use bit-field techniques or SUBREG to store in it. */
5244 if (mode == VOIDmode
5245 || (mode != BLKmode && ! direct_store[(int) mode]
5246 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5247 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5249 || GET_CODE (target) == SUBREG
5250 /* If the field isn't aligned enough to store as an ordinary memref,
5251 store it as a bit field. */
5253 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5254 || bitpos % GET_MODE_ALIGNMENT (mode))
5255 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5256 || (bitpos % BITS_PER_UNIT != 0)))
5257 /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
5261 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5262 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5264 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5266 /* If BITSIZE is narrower than the size of the type of EXP
5267 we will be narrowing TEMP. Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is a
	 big-endian machine, we want the upper BITSIZE bits.  */
5270 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5271 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5272 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5273 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5274 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5278 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5280 if (mode != VOIDmode && mode != BLKmode
5281 && mode != TYPE_MODE (TREE_TYPE (exp)))
5282 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5284 /* If the modes of TARGET and TEMP are both BLKmode, both
5285 must be in memory and BITPOS must be aligned on a byte
5286 boundary. If so, we simply do a block copy. */
5287 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5289 if (!MEM_P (target) || !MEM_P (temp)
5290 || bitpos % BITS_PER_UNIT != 0)
5293 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5294 emit_block_move (target, temp,
5295 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5299 return value_mode == VOIDmode ? const0_rtx : target;
5302 /* Store the value in the bitfield. */
5303 store_bit_field (target, bitsize, bitpos, mode, temp);
5305 if (value_mode != VOIDmode)
5307 /* The caller wants an rtx for the value.
5308 If possible, avoid refetching from the bitfield itself. */
5310 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5313 enum machine_mode tmode;
5315 tmode = GET_MODE (temp);
5316 if (tmode == VOIDmode)
5320 return expand_and (tmode, temp,
5321 gen_int_mode (width_mask, tmode),
5324 count = build_int_cst (NULL_TREE,
5325 GET_MODE_BITSIZE (tmode) - bitsize, 0);
5326 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5327 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5330 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5331 NULL_RTX, value_mode, VOIDmode);
5337 rtx addr = XEXP (target, 0);
5338 rtx to_rtx = target;
5340 /* If a value is wanted, it must be the lhs;
5341 so make the address stable for multiple use. */
5343 if (value_mode != VOIDmode && !REG_P (addr)
5344 && ! CONSTANT_ADDRESS_P (addr)
5345 /* A frame-pointer reference is already stable. */
5346 && ! (GET_CODE (addr) == PLUS
5347 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5348 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5349 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5350 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5352 /* Now build a reference to just the desired component. */
5354 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5356 if (to_rtx == target)
5357 to_rtx = copy_rtx (to_rtx);
5359 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5360 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5361 set_mem_alias_set (to_rtx, alias_set);
5363 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5367 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5368 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5369 codes and find the ultimate containing object, which we return.
5371 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5372 bit position, and *PUNSIGNEDP to the signedness of the field.
5373 If the position of the field is variable, we store a tree
5374 giving the variable offset (in units) in *POFFSET.
5375 This offset is in addition to the bit position.
5376 If the position is not variable, we store 0 in *POFFSET.
5378 If any of the extraction expressions is volatile,
5379 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5381 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.
5385 If the field describes a variable-sized object, *PMODE is set to
5386 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5387 this case, but the address of the object can be found. */
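/* For instance, for a hypothetical reference A.B[I].C this function
   peels off the COMPONENT_REFs and the ARRAY_REF, returns the innermost
   object A, folds the constant parts of the displacement into *PBITPOS,
   and leaves the part that depends on I as a byte-offset tree in
   *POFFSET.  */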
5390 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5391 HOST_WIDE_INT *pbitpos, tree *poffset,
5392 enum machine_mode *pmode, int *punsignedp,
5396 enum machine_mode mode = VOIDmode;
5397 tree offset = size_zero_node;
5398 tree bit_offset = bitsize_zero_node;
5401 /* First get the mode, signedness, and size. We do this from just the
5402 outermost expression. */
5403 if (TREE_CODE (exp) == COMPONENT_REF)
5405 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5406 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5407 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5409 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5411 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5413 size_tree = TREE_OPERAND (exp, 1);
5414 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5418 mode = TYPE_MODE (TREE_TYPE (exp));
5419 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5421 if (mode == BLKmode)
5422 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5424 *pbitsize = GET_MODE_BITSIZE (mode);
5429 if (! host_integerp (size_tree, 1))
5430 mode = BLKmode, *pbitsize = -1;
5432 *pbitsize = tree_low_cst (size_tree, 1);
5435 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5436 and find the ultimate containing object. */
5439 if (TREE_CODE (exp) == BIT_FIELD_REF)
5440 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5441 else if (TREE_CODE (exp) == COMPONENT_REF)
5443 tree field = TREE_OPERAND (exp, 1);
5444 tree this_offset = component_ref_field_offset (exp);
5446 /* If this field hasn't been filled in yet, don't go
5447 past it. This should only happen when folding expressions
5448 made during type construction. */
5449 if (this_offset == 0)
5452 offset = size_binop (PLUS_EXPR, offset, this_offset);
5453 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5454 DECL_FIELD_BIT_OFFSET (field));
5456 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5459 else if (TREE_CODE (exp) == ARRAY_REF
5460 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5462 tree index = TREE_OPERAND (exp, 1);
5463 tree low_bound = array_ref_low_bound (exp);
5464 tree unit_size = array_ref_element_size (exp);
5466 /* We assume all arrays have sizes that are a multiple of a byte.
5467 First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     element.  */
5470 if (! integer_zerop (low_bound))
5471 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5474 offset = size_binop (PLUS_EXPR, offset,
5475 size_binop (MULT_EXPR,
5476 convert (sizetype, index),
      /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5481 conversions that don't change the mode, and all view conversions
5482 except those that need to "step up" the alignment. */
5483 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5484 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5485 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5486 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5488 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5489 < BIGGEST_ALIGNMENT)
5490 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5491 || TYPE_ALIGN_OK (TREE_TYPE
5492 (TREE_OPERAND (exp, 0))))))
5493 && ! ((TREE_CODE (exp) == NOP_EXPR
5494 || TREE_CODE (exp) == CONVERT_EXPR)
5495 && (TYPE_MODE (TREE_TYPE (exp))
5496 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5499 /* If any reference in the chain is volatile, the effect is volatile. */
5500 if (TREE_THIS_VOLATILE (exp))
5503 exp = TREE_OPERAND (exp, 0);
5506 /* If OFFSET is constant, see if we can return the whole thing as a
5507 constant bit position. Otherwise, split it up. */
5508 if (host_integerp (offset, 0)
5509 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5511 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5512 && host_integerp (tem, 0))
5513 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5515 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5521 /* Return a tree of sizetype representing the size, in bytes, of the element
5522 of EXP, an ARRAY_REF. */
5525 array_ref_element_size (tree exp)
5527 tree aligned_size = TREE_OPERAND (exp, 3);
5528 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5530 /* If a size was specified in the ARRAY_REF, it's the size measured
     in alignment units of the element type.  So multiply by that value.  */
  if (aligned_size)
5533 return size_binop (MULT_EXPR, aligned_size,
5534 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5536 /* Otherwise, take the size from that of the element type. Substitute
5537 any PLACEHOLDER_EXPR that we have. */
5539 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5542 /* Return a tree representing the lower bound of the array mentioned in
5543 EXP, an ARRAY_REF. */
5546 array_ref_low_bound (tree exp)
5548 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5550 /* If a lower bound is specified in EXP, use it. */
5551 if (TREE_OPERAND (exp, 2))
5552 return TREE_OPERAND (exp, 2);
5554 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5555 substituting for a PLACEHOLDER_EXPR as needed. */
5556 if (domain_type && TYPE_MIN_VALUE (domain_type))
5557 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5559 /* Otherwise, return a zero of the appropriate type. */
5560 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5563 /* Return a tree representing the upper bound of the array mentioned in
5564 EXP, an ARRAY_REF. */
5567 array_ref_up_bound (tree exp)
5569 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5571 /* If there is a domain type and it has an upper bound, use it, substituting
5572 for a PLACEHOLDER_EXPR as needed. */
5573 if (domain_type && TYPE_MAX_VALUE (domain_type))
5574 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5576 /* Otherwise fail. */
5580 /* Return a tree representing the offset, in bytes, of the field referenced
5581 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5584 component_ref_field_offset (tree exp)
5586 tree aligned_offset = TREE_OPERAND (exp, 2);
5587 tree field = TREE_OPERAND (exp, 1);
5589 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
     in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
     value.  */
  if (aligned_offset)
5593 return size_binop (MULT_EXPR, aligned_offset,
5594 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5596 /* Otherwise, take the offset from that of the field. Substitute
5597 any PLACEHOLDER_EXPR that we have. */
5599 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5602 /* Return 1 if T is an expression that get_inner_reference handles. */
5605 handled_component_p (tree t)
5607 switch (TREE_CODE (t))
5612 case ARRAY_RANGE_REF:
5613 case NON_LVALUE_EXPR:
5614 case VIEW_CONVERT_EXPR:
5617 /* ??? Sure they are handled, but get_inner_reference may return
5618 a different PBITSIZE, depending upon whether the expression is
5619 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5622 return (TYPE_MODE (TREE_TYPE (t))
5623 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5630 /* Given an rtx VALUE that may contain additions and multiplications, return
5631 an equivalent value that just refers to a register, memory, or constant.
5632 This is done by generating instructions to perform the arithmetic and
5633 returning a pseudo-register containing the value.
5635 The returned value may be a REG, SUBREG, MEM or constant. */
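/* For instance, given the rtx (plus:SI (reg:SI 60) (const_int 4)),
   force_operand emits an add instruction and returns a pseudo register
   holding the sum, so the caller ends up with a plain REG operand.  */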
5638 force_operand (rtx value, rtx target)
5641 /* Use subtarget as the target for operand 0 of a binary operation. */
5642 rtx subtarget = get_subtarget (target);
5643 enum rtx_code code = GET_CODE (value);
  /* Check for a subreg applied to an expression produced by the loop
     optimizer.  */
5647 && !REG_P (SUBREG_REG (value))
5648 && !MEM_P (SUBREG_REG (value)))
5650 value = simplify_gen_subreg (GET_MODE (value),
5651 force_reg (GET_MODE (SUBREG_REG (value)),
5652 force_operand (SUBREG_REG (value),
5654 GET_MODE (SUBREG_REG (value)),
5655 SUBREG_BYTE (value));
5656 code = GET_CODE (value);
5659 /* Check for a PIC address load. */
5660 if ((code == PLUS || code == MINUS)
5661 && XEXP (value, 0) == pic_offset_table_rtx
5662 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5663 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5664 || GET_CODE (XEXP (value, 1)) == CONST))
5667 subtarget = gen_reg_rtx (GET_MODE (value));
5668 emit_move_insn (subtarget, value);
5672 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5675 target = gen_reg_rtx (GET_MODE (value));
5676 convert_move (target, force_operand (XEXP (value, 0), NULL),
5677 code == ZERO_EXTEND);
5681 if (ARITHMETIC_P (value))
5683 op2 = XEXP (value, 1);
5684 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5686 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5689 op2 = negate_rtx (GET_MODE (value), op2);
5692 /* Check for an addition with OP2 a constant integer and our first
5693 operand a PLUS of a virtual register and something else. In that
5694 case, we want to emit the sum of the virtual register and the
5695 constant first and then add the other value. This allows virtual
5696 register instantiation to simply modify the constant rather than
5697 creating another one around this addition. */
5698 if (code == PLUS && GET_CODE (op2) == CONST_INT
5699 && GET_CODE (XEXP (value, 0)) == PLUS
5700 && REG_P (XEXP (XEXP (value, 0), 0))
5701 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5702 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5704 rtx temp = expand_simple_binop (GET_MODE (value), code,
5705 XEXP (XEXP (value, 0), 0), op2,
5706 subtarget, 0, OPTAB_LIB_WIDEN);
5707 return expand_simple_binop (GET_MODE (value), code, temp,
5708 force_operand (XEXP (XEXP (value,
5710 target, 0, OPTAB_LIB_WIDEN);
5713 op1 = force_operand (XEXP (value, 0), subtarget);
5714 op2 = force_operand (op2, NULL_RTX);
5718 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5720 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5721 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5722 target, 1, OPTAB_LIB_WIDEN);
5724 return expand_divmod (0,
5725 FLOAT_MODE_P (GET_MODE (value))
5726 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5727 GET_MODE (value), op1, op2, target, 0);
5730 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5734 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5738 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5742 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5743 target, 0, OPTAB_LIB_WIDEN);
5746 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5747 target, 1, OPTAB_LIB_WIDEN);
5750 if (UNARY_P (value))
5752 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5753 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5756 #ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references
     to be explicit, so we need to deal with such paradoxical SUBREGs.  */
5759 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5760 && (GET_MODE_SIZE (GET_MODE (value))
5761 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5763 = simplify_gen_subreg (GET_MODE (value),
5764 force_reg (GET_MODE (SUBREG_REG (value)),
5765 force_operand (SUBREG_REG (value),
5767 GET_MODE (SUBREG_REG (value)),
5768 SUBREG_BYTE (value));
5774 /* Subroutine of expand_expr: return nonzero iff there is no way that
5775 EXP can reference X, which is being modified. TOP_P is nonzero if this
5776 call is going to be used to determine whether we need a temporary
5777 for EXP, as opposed to a recursive call to this function.
5779 It is always safe for this routine to return zero since it merely
5780 searches for optimization opportunities. */
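/* For example, when expanding a hypothetical assignment "a = b + a",
   the expander may ask whether the rtx chosen for A is safe from the
   tree "b + a"; answering 0 is always acceptable and merely forces the
   sum to be computed into a temporary before the store.  */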
5783 safe_from_p (rtx x, tree exp, int top_p)
5789 /* If EXP has varying size, we MUST use a target since we currently
5790 have no way of allocating temporaries of variable size
5791 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5792 So we assume here that something at a higher level has prevented a
5793 clash. This is somewhat bogus, but the best we can do. Only
5794 do this when X is BLKmode and when we are at the top level. */
5795 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5796 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5797 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5798 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5799 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5801 && GET_MODE (x) == BLKmode)
5802 /* If X is in the outgoing argument area, it is always safe. */
5804 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5805 || (GET_CODE (XEXP (x, 0)) == PLUS
5806 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5809 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5810 find the underlying pseudo. */
5811 if (GET_CODE (x) == SUBREG)
5814 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5818 /* Now look at our tree code and possibly recurse. */
5819 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5822 exp_rtl = DECL_RTL_IF_SET (exp);
5829 if (TREE_CODE (exp) == TREE_LIST)
5833 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5835 exp = TREE_CHAIN (exp);
5838 if (TREE_CODE (exp) != TREE_LIST)
5839 return safe_from_p (x, exp, 0);
5842 else if (TREE_CODE (exp) == ERROR_MARK)
5843 return 1; /* An already-visited SAVE_EXPR? */
      /* The only case we look at here is the DECL_INITIAL inside a
	 DECL_EXPR.  */
5850 return (TREE_CODE (exp) != DECL_EXPR
5851 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5852 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5853 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5857 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5862 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5866 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5867 the expression. If it is set, we conflict iff we are that rtx or
5868 both are in memory. Otherwise, we check all operands of the
5869 expression recursively. */
5871 switch (TREE_CODE (exp))
5874 /* If the operand is static or we are static, we can't conflict.
5875 Likewise if we don't conflict with the operand at all. */
5876 if (staticp (TREE_OPERAND (exp, 0))
5877 || TREE_STATIC (exp)
5878 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	return 1;

      /* Otherwise, the only way this can conflict is if we are taking
	 the address of a DECL and that address is part of X, which is
	 very rare.  */
5884 exp = TREE_OPERAND (exp, 0);
5887 if (!DECL_RTL_SET_P (exp)
5888 || !MEM_P (DECL_RTL (exp)))
5891 exp_rtl = XEXP (DECL_RTL (exp), 0);
5897 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5898 get_alias_set (exp)))
      /* Assume that the call will clobber all hard registers and
	 all of memory.  */
5905 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5910 case WITH_CLEANUP_EXPR:
5911 case CLEANUP_POINT_EXPR:
5912 /* Lowered by gimplify.c. */
5916 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5922 /* If we have an rtx, we do not need to scan our operands. */
5926 nops = first_rtl_op (TREE_CODE (exp));
5927 for (i = 0; i < nops; i++)
5928 if (TREE_OPERAND (exp, i) != 0
5929 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5932 /* If this is a language-specific tree code, it may require
5933 special handling. */
5934 if ((unsigned int) TREE_CODE (exp)
5935 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5936 && !lang_hooks.safe_from_p (x, exp))
  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
5944 if (GET_CODE (exp_rtl) == SUBREG)
5946 exp_rtl = SUBREG_REG (exp_rtl);
5948 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5952 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5953 are memory and they conflict. */
5954 return ! (rtx_equal_p (x, exp_rtl)
5955 || (MEM_P (x) && MEM_P (exp_rtl)
5956 && true_dependence (exp_rtl, VOIDmode, x,
5957 rtx_addr_varies_p)));
5960 /* If we reach here, it is safe. */
5965 /* Return the highest power of two that EXP is known to be a multiple of.
5966 This is used in updating alignment of MEMs in array references. */
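/* For instance, the constant 24 yields 8 (its lowest set bit), a
   PLUS_EXPR whose operands have factors 8 and 4 yields MIN (8, 4) == 4,
   and a MULT_EXPR with the same factors yields 8 * 4 == 32.  */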
5968 static unsigned HOST_WIDE_INT
5969 highest_pow2_factor (tree exp)
5971 unsigned HOST_WIDE_INT c0, c1;
5973 switch (TREE_CODE (exp))
5976 /* We can find the lowest bit that's a one. If the low
5977 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5978 We need to handle this case since we can find it in a COND_EXPR,
5979 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
	 later ICE.  */
5982 if (TREE_CONSTANT_OVERFLOW (exp))
5983 return BIGGEST_ALIGNMENT;
5986 /* Note: tree_low_cst is intentionally not used here,
5987 we don't care about the upper bits. */
	  c0 = TREE_INT_CST_LOW (exp);
	  c0 &= -c0;		/* Isolate the lowest set bit.  */
	  return c0 ? c0 : BIGGEST_ALIGNMENT;
5994 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5995 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5996 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5997 return MIN (c0, c1);
    case MULT_EXPR:
      c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
      c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
      return c0 * c1;
6004 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6006 if (integer_pow2p (TREE_OPERAND (exp, 1))
6007 && host_integerp (TREE_OPERAND (exp, 1), 1))
6009 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6010 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6011 return MAX (1, c0 / c1);
6015 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6017 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6020 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6023 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6024 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6025 return MIN (c0, c1);
6034 /* Similar, except that the alignment requirements of TARGET are
6035 taken into account. Assume it is at least as aligned as its
6036 type, unless it is a COMPONENT_REF in which case the layout of
6037 the structure gives the alignment. */
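/* E.g. if EXP alone only proves a factor of 4 but TARGET is a
   COMPONENT_REF of a field declared with 8-byte alignment, the result
   is MAX (4, 8) == 8.  */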
6039 static unsigned HOST_WIDE_INT
6040 highest_pow2_factor_for_target (tree target, tree exp)
6042 unsigned HOST_WIDE_INT target_align, factor;
6044 factor = highest_pow2_factor (exp);
6045 if (TREE_CODE (target) == COMPONENT_REF)
6046 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6048 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6049 return MAX (factor, target_align);
6052 /* Expands variable VAR. */
6055 expand_var (tree var)
6057 if (DECL_EXTERNAL (var))
6060 if (TREE_STATIC (var))
6061 /* If this is an inlined copy of a static local variable,
6062 look up the original decl. */
6063 var = DECL_ORIGIN (var);
6065 if (TREE_STATIC (var)
6066 ? !TREE_ASM_WRITTEN (var)
6067 : !DECL_RTL_SET_P (var))
6069 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
6070 /* Should be ignored. */;
6071 else if (lang_hooks.expand_decl (var))
6073 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6075 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6076 rest_of_decl_compilation (var, 0, 0);
6077 else if (TREE_CODE (var) == TYPE_DECL
6078 || TREE_CODE (var) == CONST_DECL
6079 || TREE_CODE (var) == FUNCTION_DECL
6080 || TREE_CODE (var) == LABEL_DECL)
6081 /* No expansion needed. */;
6087 /* Subroutine of expand_expr. Expand the two operands of a binary
6088 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6089 The value may be stored in TARGET if TARGET is nonzero. The
6090 MODIFIER argument is as documented by expand_expr. */
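/* For instance, for a hypothetical expression X + X the two operands
   compare operand_equal_p, so X is expanded only once and the second
   operand is simply a copy_rtx of the first.  */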
6093 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6094 enum expand_modifier modifier)
  if (! safe_from_p (target, exp1, 1))
    target = 0;
6098 if (operand_equal_p (exp0, exp1, 0))
6100 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6101 *op1 = copy_rtx (*op0);
6105 /* If we need to preserve evaluation order, copy exp0 into its own
6106 temporary variable so that it can't be clobbered by exp1. */
6107 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6108 exp0 = save_expr (exp0);
6109 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6110 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6115 /* expand_expr: generate code for computing expression EXP.
6116 An rtx for the computed value is returned. The value is never null.
6117 In the case of a void EXP, const0_rtx is returned.
6119 The value may be stored in TARGET if TARGET is nonzero.
6120 TARGET is just a suggestion; callers must assume that
6121 the rtx returned may not be the same as TARGET.
6123 If TARGET is CONST0_RTX, it means that the value will be ignored.
6125 If TMODE is not VOIDmode, it suggests generating the
6126 result in mode TMODE. But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
6128 TMODE is just a suggestion; callers must assume that
6129 the rtx returned may not have mode TMODE.
6131 Note that TARGET may have neither TMODE nor MODE. In that case, it
6132 probably will not be used.
6134 If MODIFIER is EXPAND_SUM then when EXP is an addition
6135 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6136 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6137 products as above, or REG or MEM, or constant.
6138 Ordinarily in such cases we would output mul or add instructions
6139 and then return a pseudo reg containing the sum.
6141 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6142 it also marks a label as absolutely required (it can't be dead).
6143 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6144 This is used for outputting expressions used in initializers.
6146 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6147 with a constant address even if that address is not normally legitimate.
6148 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6150 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6151 a call parameter. Such targets require special care as we haven't yet
6152 marked TARGET so that it's safe from being trashed by libcalls. We
6153 don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
6155 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6157 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6158 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6159 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
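/* As a small illustration of the EXPAND_SUM contract: expanding a
   hypothetical address computation "p + 4" with EXPAND_SUM may simply
   return (plus:SI (reg:SI 60) (const_int 4)), whereas EXPAND_NORMAL
   would emit the addition and return a pseudo holding the sum.  */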
6163 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6164 enum expand_modifier, rtx *);
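/* Illustrative example of the EXPAND_SUM convention documented above
   (the RTL shape is a sketch, not taken from any particular target):
   for the address computation of arr[i] with 4-byte elements, the
   caller may receive an un-forced sum such as

       (plus:SI (mult:SI (reg:SI 58) (const_int 4))
                (symbol_ref:SI "arr"))

   which it can fold directly into an addressing mode, instead of a
   pseudo register holding the already-computed sum. */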
6167 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6168 enum expand_modifier modifier, rtx *alt_rtl)
6171 rtx ret, last = NULL;
6173 /* Handle ERROR_MARK before anybody tries to access its type. */
6174 if (TREE_CODE (exp) == ERROR_MARK
6175 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6177 ret = CONST0_RTX (tmode);
6178 return ret ? ret : const0_rtx;
6181 if (flag_non_call_exceptions)
6183 rn = lookup_stmt_eh_region (exp);
6184 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6186 last = get_last_insn ();
6189 /* If this is an expression of some kind and it has an associated line
6190 number, then emit the line number before expanding the expression.
6192 We need to save and restore the file and line information so that
6193 errors discovered during expansion are emitted with the right
6194 information. It would be better if the diagnostic routines
6195 used the file/line information embedded in the tree nodes rather
6197 if (cfun && EXPR_HAS_LOCATION (exp))
6199 location_t saved_location = input_location;
6200 input_location = EXPR_LOCATION (exp);
6201 emit_line_note (input_location);
6203 /* Record where the insns produced belong. */
6204 record_block_change (TREE_BLOCK (exp));
6206 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6208 input_location = saved_location;
6212 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6215 /* If using non-call exceptions, mark all insns that may trap.
6216 expand_call() will mark CALL_INSNs before we get to this code,
6217 but it doesn't handle libcalls, and these may trap. */
6221 for (insn = next_real_insn (last); insn;
6222 insn = next_real_insn (insn))
6224 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6225 /* If we want exceptions for non-call insns, any
6226 may_trap_p instruction may throw. */
6227 && GET_CODE (PATTERN (insn)) != CLOBBER
6228 && GET_CODE (PATTERN (insn)) != USE
6229 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6231 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6241 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6242 enum expand_modifier modifier, rtx *alt_rtl)
6245 tree type = TREE_TYPE (exp);
6247 enum machine_mode mode;
6248 enum tree_code code = TREE_CODE (exp);
6250 rtx subtarget, original_target;
6253 bool reduce_bit_field = false;
6254 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6255 ? reduce_to_bit_field_precision ((expr), \
6260 mode = TYPE_MODE (type);
6261 unsignedp = TYPE_UNSIGNED (type);
6262 if (lang_hooks.reduce_bit_field_operations
6263 && TREE_CODE (type) == INTEGER_TYPE
6264 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6266 /* An operation in what may be a bit-field type needs the
6267 result to be reduced to the precision of the bit-field type,
6268 which is narrower than that of the type's mode. */
6269 reduce_bit_field = true;
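/* Illustrative example of the reduction just enabled: for a signed
   3-bit bit-field computed in SImode, 3 + 1 must be reduced back to
   3-bit precision, i.e. sign-extended from bit 2, giving -4 rather
   than 4. REDUCE_BIT_FIELD applies exactly this narrowing to the
   results returned below. */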
6270 if (modifier == EXPAND_STACK_PARM)
6274 /* Use subtarget as the target for operand 0 of a binary operation. */
6275 subtarget = get_subtarget (target);
6276 original_target = target;
6277 ignore = (target == const0_rtx
6278 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6279 || code == CONVERT_EXPR || code == COND_EXPR
6280 || code == VIEW_CONVERT_EXPR)
6281 && TREE_CODE (type) == VOID_TYPE));
6283 /* If we are going to ignore this result, we need only do something
6284 if there is a side-effect somewhere in the expression. If there
6285 is, short-circuit the most common cases here. Note that we must
6286 not call expand_expr with anything but const0_rtx in case this
6287 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6291 if (! TREE_SIDE_EFFECTS (exp))
6294 /* Ensure we reference a volatile object even if the value is ignored, but
6295 don't do this if all we are doing is taking its address. */
6296 if (TREE_THIS_VOLATILE (exp)
6297 && TREE_CODE (exp) != FUNCTION_DECL
6298 && mode != VOIDmode && mode != BLKmode
6299 && modifier != EXPAND_CONST_ADDRESS)
6301 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6303 temp = copy_to_reg (temp);
6307 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6308 || code == INDIRECT_REF)
6309 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6312 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6313 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6315 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6316 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6319 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6320 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6321 /* If the second operand has no side effects, just evaluate
6323 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6325 else if (code == BIT_FIELD_REF)
6327 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6328 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6329 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6336 /* If will do cse, generate all results into pseudo registers
6337 since 1) that allows cse to find more things
6338 and 2) otherwise cse could produce an insn the machine
6339 cannot support. An exception is a CONSTRUCTOR into a multi-word
6340 MEM: storing directly into the MEM is much more likely to be efficient.
6341 Another is a CALL_EXPR which must return in memory. */
6343 if (! cse_not_expected && mode != BLKmode && target
6344 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6345 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6346 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6353 tree function = decl_function_context (exp);
6355 temp = label_rtx (exp);
6356 temp = gen_rtx_LABEL_REF (Pmode, temp);
6358 if (function != current_function_decl
6360 LABEL_REF_NONLOCAL_P (temp) = 1;
6362 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6368 /* If a static var's type was incomplete when the decl was written,
6369 but the type is complete now, lay out the decl now. */
6370 if (DECL_SIZE (exp) == 0
6371 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6372 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6373 layout_decl (exp, 0);
6375 /* ... fall through ... */
6379 if (DECL_RTL (exp) == 0)
6382 /* Ensure the variable is marked as used even if it doesn't go through
6383 a parser. If it hasn't been used yet, write out an external
6385 if (! TREE_USED (exp))
6387 assemble_external (exp);
6388 TREE_USED (exp) = 1;
6391 /* Show we haven't gotten RTL for this yet. */
6394 /* Variables inherited from containing functions should have
6395 been lowered by this point. */
6396 context = decl_function_context (exp);
6398 && context != current_function_decl
6399 && !TREE_STATIC (exp)
6400 /* ??? C++ creates functions that are not TREE_STATIC. */
6401 && TREE_CODE (exp) != FUNCTION_DECL)
6404 /* This is the case of an array whose size is to be determined
6405 from its initializer, while the initializer is still being parsed.
6408 else if (MEM_P (DECL_RTL (exp))
6409 && REG_P (XEXP (DECL_RTL (exp), 0)))
6410 temp = validize_mem (DECL_RTL (exp));
6412 /* If DECL_RTL is memory, we are in the normal case: either
6413 the address is not valid, or it is not a register and -fforce-addr
6414 is specified; get the address into a register. */
6416 else if (MEM_P (DECL_RTL (exp))
6417 && modifier != EXPAND_CONST_ADDRESS
6418 && modifier != EXPAND_SUM
6419 && modifier != EXPAND_INITIALIZER
6420 && (! memory_address_p (DECL_MODE (exp),
6421 XEXP (DECL_RTL (exp), 0))
6423 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6426 *alt_rtl = DECL_RTL (exp);
6427 temp = replace_equiv_address (DECL_RTL (exp),
6428 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6431 /* If we got something, return it. But first, set the alignment
6432 if the address is a register. */
6435 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6436 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6441 /* If the mode of DECL_RTL does not match that of the decl, it
6442 must be a promoted value. We return a SUBREG of the wanted mode,
6443 but mark it so that we know that it was already extended. */
6445 if (REG_P (DECL_RTL (exp))
6446 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6448 /* Get the signedness used for this variable. Ensure we get the
6449 same mode we got when the variable was declared. */
6450 if (GET_MODE (DECL_RTL (exp))
6451 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6452 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6455 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6456 SUBREG_PROMOTED_VAR_P (temp) = 1;
6457 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6461 return DECL_RTL (exp);
6464 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6465 TREE_INT_CST_HIGH (exp), mode);
6467 /* ??? If overflow is set, fold will have done an incomplete job,
6468 which can result in (plus xx (const_int 0)), which can get
6469 simplified by validate_replace_rtx during virtual register
6470 instantiation, which can result in unrecognizable insns.
6471 Avoid this by forcing all overflows into registers. */
6472 if (TREE_CONSTANT_OVERFLOW (exp)
6473 && modifier != EXPAND_INITIALIZER)
6474 temp = force_reg (mode, temp);
6479 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6480 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6481 return const_vector_from_tree (exp);
6483 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6484 TREE_VECTOR_CST_ELTS (exp)),
6485 ignore ? const0_rtx : target, tmode, modifier);
6488 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6491 /* If optimized, generate immediate CONST_DOUBLE
6492 which will be turned into memory by reload if necessary.
6494 We used to force a register so that loop.c could see it. But
6495 this does not allow gen_* patterns to perform optimizations with
6496 the constants. It also produces two insns in cases like "x = 1.0;".
6497 On most machines, floating-point constants are not permitted in
6498 many insns, so we'd end up copying it to a register in any case.
6500 Now, we do the copying in expand_binop, if appropriate. */
6501 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6502 TYPE_MODE (TREE_TYPE (exp)));
6505 /* Handle evaluating a complex constant in a CONCAT target. */
6506 if (original_target && GET_CODE (original_target) == CONCAT)
6508 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6511 rtarg = XEXP (original_target, 0);
6512 itarg = XEXP (original_target, 1);
6514 /* Move the real and imaginary parts separately. */
6515 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6516 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6519 emit_move_insn (rtarg, op0);
6521 emit_move_insn (itarg, op1);
6523 return original_target;
6526 /* ... fall through ... */
6529 temp = output_constant_def (exp, 1);
6531 /* temp contains a constant address.
6532 On RISC machines where a constant address isn't valid,
6533 make some insns to get that address into a register. */
6534 if (modifier != EXPAND_CONST_ADDRESS
6535 && modifier != EXPAND_INITIALIZER
6536 && modifier != EXPAND_SUM
6537 && (! memory_address_p (mode, XEXP (temp, 0))
6538 || flag_force_addr))
6539 return replace_equiv_address (temp,
6540 copy_rtx (XEXP (temp, 0)));
6545 tree val = TREE_OPERAND (exp, 0);
6546 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6548 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6550 /* We can indeed still hit this case, typically via builtin
6551 expanders calling save_expr immediately before expanding
6552 something. Assume this means that we only have to deal
6553 with non-BLKmode values. */
6554 if (GET_MODE (ret) == BLKmode)
6557 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6558 DECL_ARTIFICIAL (val) = 1;
6559 TREE_OPERAND (exp, 0) = val;
6561 if (!CONSTANT_P (ret))
6562 ret = copy_to_reg (ret);
6563 SET_DECL_RTL (val, ret);
6570 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6571 expand_goto (TREE_OPERAND (exp, 0));
6573 expand_computed_goto (TREE_OPERAND (exp, 0));
6577 /* If we don't need the result, just ensure we evaluate any
6583 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6584 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6589 /* All elts simple constants => refer to a constant in memory. But
6590 if this is a non-BLKmode mode, let it store a field at a time
6591 since that should make a CONST_INT or CONST_DOUBLE when we
6592 fold. Likewise, if we have a target we can use, it is best to
6593 store directly into the target unless the type is large enough
6594 that memcpy will be used. If we are making an initializer and
6595 all operands are constant, put it in memory as well.
6597 FIXME: Avoid trying to fill vector constructors piecemeal.
6598 Output them with output_constant_def below unless we're sure
6599 they're zeros. This should go away when vector initializers
6600 are treated like VECTOR_CST instead of arrays.
6602 else if ((TREE_STATIC (exp)
6603 && ((mode == BLKmode
6604 && ! (target != 0 && safe_from_p (target, exp, 1)))
6605 || TREE_ADDRESSABLE (exp)
6606 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6607 && (! MOVE_BY_PIECES_P
6608 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6610 && ! mostly_zeros_p (exp))))
6611 || ((modifier == EXPAND_INITIALIZER
6612 || modifier == EXPAND_CONST_ADDRESS)
6613 && TREE_CONSTANT (exp)))
6615 rtx constructor = output_constant_def (exp, 1);
6617 if (modifier != EXPAND_CONST_ADDRESS
6618 && modifier != EXPAND_INITIALIZER
6619 && modifier != EXPAND_SUM)
6620 constructor = validize_mem (constructor);
6626 /* Handle calls that pass values in multiple non-contiguous
6627 locations. The Irix 6 ABI has examples of this. */
6628 if (target == 0 || ! safe_from_p (target, exp, 1)
6629 || GET_CODE (target) == PARALLEL
6630 || modifier == EXPAND_STACK_PARM)
6632 = assign_temp (build_qualified_type (type,
6634 | (TREE_READONLY (exp)
6635 * TYPE_QUAL_CONST))),
6636 0, TREE_ADDRESSABLE (exp), 1);
6638 store_constructor (exp, target, 0, int_expr_size (exp));
6644 tree exp1 = TREE_OPERAND (exp, 0);
6646 if (modifier != EXPAND_WRITE)
6650 t = fold_read_from_constant_string (exp);
6652 return expand_expr (t, target, tmode, modifier);
6655 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6656 op0 = memory_address (mode, op0);
6657 temp = gen_rtx_MEM (mode, op0);
6658 set_mem_attributes (temp, exp, 0);
6660 /* If we are writing to this object and its type is a record with
6661 readonly fields, we must mark it as readonly so it will
6662 conflict with readonly references to those fields. */
6663 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6664 RTX_UNCHANGING_P (temp) = 1;
6671 #ifdef ENABLE_CHECKING
6672 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6677 tree array = TREE_OPERAND (exp, 0);
6678 tree low_bound = array_ref_low_bound (exp);
6679 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6682 /* Optimize the special-case of a zero lower bound.
6684 We convert the low_bound to sizetype to avoid some problems
6685 with constant folding. (E.g. suppose the lower bound is 1,
6686 and its mode is QI. Without the conversion, (ARRAY
6687 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6688 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6690 if (! integer_zerop (low_bound))
6691 index = size_diffop (index, convert (sizetype, low_bound));
6693 /* Fold an expression like: "foo"[2].
6694 This is not done in fold so it won't happen inside &.
6695 Don't fold if this is for wide characters since it's too
6696 difficult to do correctly and this is a very rare case. */
6698 if (modifier != EXPAND_CONST_ADDRESS
6699 && modifier != EXPAND_INITIALIZER
6700 && modifier != EXPAND_MEMORY)
6702 tree t = fold_read_from_constant_string (exp);
6705 return expand_expr (t, target, tmode, modifier);
6708 /* If this is a constant index into a constant array,
6709 just get the value from the array. Handle both the cases when
6710 we have an explicit constructor and when our operand is a variable
6711 that was declared const. */
6713 if (modifier != EXPAND_CONST_ADDRESS
6714 && modifier != EXPAND_INITIALIZER
6715 && modifier != EXPAND_MEMORY
6716 && TREE_CODE (array) == CONSTRUCTOR
6717 && ! TREE_SIDE_EFFECTS (array)
6718 && TREE_CODE (index) == INTEGER_CST
6719 && 0 > compare_tree_int (index,
6720 list_length (CONSTRUCTOR_ELTS
6721 (TREE_OPERAND (exp, 0)))))
6725 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6726 i = TREE_INT_CST_LOW (index);
6727 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6731 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6735 else if (optimize >= 1
6736 && modifier != EXPAND_CONST_ADDRESS
6737 && modifier != EXPAND_INITIALIZER
6738 && modifier != EXPAND_MEMORY
6739 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6740 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6741 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6742 && targetm.binds_local_p (array))
6744 if (TREE_CODE (index) == INTEGER_CST)
6746 tree init = DECL_INITIAL (array);
6748 if (TREE_CODE (init) == CONSTRUCTOR)
6752 for (elem = CONSTRUCTOR_ELTS (init);
6754 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6755 elem = TREE_CHAIN (elem))
6758 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6759 return expand_expr (fold (TREE_VALUE (elem)), target,
6762 else if (TREE_CODE (init) == STRING_CST
6763 && 0 > compare_tree_int (index,
6764 TREE_STRING_LENGTH (init)))
6766 tree type = TREE_TYPE (TREE_TYPE (init));
6767 enum machine_mode mode = TYPE_MODE (type);
6769 if (GET_MODE_CLASS (mode) == MODE_INT
6770 && GET_MODE_SIZE (mode) == 1)
6771 return gen_int_mode (TREE_STRING_POINTER (init)
6772 [TREE_INT_CST_LOW (index)], mode);
6777 goto normal_inner_ref;
6780 /* If the operand is a CONSTRUCTOR, we can just extract the
6781 appropriate field if it is present. */
6782 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6786 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6787 elt = TREE_CHAIN (elt))
6788 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6789 /* We can normally use the value of the field in the
6790 CONSTRUCTOR. However, if this is a bitfield in
6791 an integral mode that we can fit in a HOST_WIDE_INT,
6792 we must mask only the number of bits in the bitfield,
6793 since this is done implicitly by the constructor. If
6794 the bitfield does not meet either of those conditions,
6795 we can't do this optimization. */
6796 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6797 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6799 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6800 <= HOST_BITS_PER_WIDE_INT))))
6802 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6803 && modifier == EXPAND_STACK_PARM)
6805 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6806 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6808 HOST_WIDE_INT bitsize
6809 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6810 enum machine_mode imode
6811 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6813 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6815 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6816 op0 = expand_and (imode, op0, op1, target);
6821 = build_int_cst (NULL_TREE,
6822 GET_MODE_BITSIZE (imode) - bitsize,
6825 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6827 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6835 goto normal_inner_ref;
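/* Illustrative arithmetic for the bit-field handling above, assuming a
   32-bit mode: an unsigned BITSIZE-bit field is isolated with the mask
   ((HOST_WIDE_INT) 1 << bitsize) - 1, while a signed field is
   sign-extended by the shift pair

       x = (x << (32 - bitsize)) >> (32 - bitsize);

   where the right shift is arithmetic. */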
6838 case ARRAY_RANGE_REF:
6841 enum machine_mode mode1;
6842 HOST_WIDE_INT bitsize, bitpos;
6845 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6846 &mode1, &unsignedp, &volatilep);
6849 /* If we got back the original object, something is wrong. Perhaps
6850 we are evaluating an expression too early. In any event, don't
6851 infinitely recurse. */
6855 /* If TEM's type is a union of variable size, pass TARGET to the inner
6856 computation, since it will need a temporary and TARGET is known
6857 to be safe to use for one. This occurs in unchecked conversion in Ada. */
6861 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6862 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6864 && modifier != EXPAND_STACK_PARM
6865 ? target : NULL_RTX),
6867 (modifier == EXPAND_INITIALIZER
6868 || modifier == EXPAND_CONST_ADDRESS
6869 || modifier == EXPAND_STACK_PARM)
6870 ? modifier : EXPAND_NORMAL);
6872 /* If this is a constant, put it into a register if it is a
6873 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6874 if (CONSTANT_P (op0))
6876 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6877 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6879 op0 = force_reg (mode, op0);
6881 op0 = validize_mem (force_const_mem (mode, op0));
6884 /* Otherwise, if this object is not in memory and we either have an
6885 offset or a BLKmode result, put it there. This case can't occur in
6886 C, but can in Ada if we have unchecked conversion of an expression
6887 from a scalar type to an array or record type or for an
6888 ARRAY_RANGE_REF whose type is BLKmode. */
6889 else if (!MEM_P (op0)
6891 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6893 tree nt = build_qualified_type (TREE_TYPE (tem),
6894 (TYPE_QUALS (TREE_TYPE (tem))
6895 | TYPE_QUAL_CONST));
6896 rtx memloc = assign_temp (nt, 1, 1, 1);
6898 emit_move_insn (memloc, op0);
6904 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6910 #ifdef POINTERS_EXTEND_UNSIGNED
6911 if (GET_MODE (offset_rtx) != Pmode)
6912 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6914 if (GET_MODE (offset_rtx) != ptr_mode)
6915 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6918 if (GET_MODE (op0) == BLKmode
6919 /* A constant address in OP0 can have VOIDmode; we must
6920 not try to call force_reg in that case. */
6921 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6923 && (bitpos % bitsize) == 0
6924 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6925 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6927 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6931 op0 = offset_address (op0, offset_rtx,
6932 highest_pow2_factor (offset));
6935 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6936 record its alignment as BIGGEST_ALIGNMENT. */
6937 if (MEM_P (op0) && bitpos == 0 && offset != 0
6938 && is_aligning_offset (offset, tem))
6939 set_mem_align (op0, BIGGEST_ALIGNMENT);
6941 /* Don't forget about volatility even if this is a bitfield. */
6942 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6944 if (op0 == orig_op0)
6945 op0 = copy_rtx (op0);
6947 MEM_VOLATILE_P (op0) = 1;
6950 /* The following code doesn't handle CONCAT.
6951 Assume only bitpos == 0 can be used for CONCAT, due to
6952 one-element arrays having the same mode as their element. */
6953 if (GET_CODE (op0) == CONCAT)
6955 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
6960 /* In cases where an aligned union has an unaligned object
6961 as a field, we might be extracting a BLKmode value from
6962 an integer-mode (e.g., SImode) object. Handle this case
6963 by doing the extract into an object as wide as the field
6964 (which we know to be the width of a basic mode), then
6965 storing into memory, and changing the mode to BLKmode. */
6966 if (mode1 == VOIDmode
6967 || REG_P (op0) || GET_CODE (op0) == SUBREG
6968 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6969 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6970 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6971 && modifier != EXPAND_CONST_ADDRESS
6972 && modifier != EXPAND_INITIALIZER)
6973 /* If the field isn't aligned enough to fetch as a memref,
6974 fetch it as a bit field. */
6975 || (mode1 != BLKmode
6976 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6977 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6979 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6980 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6981 && ((modifier == EXPAND_CONST_ADDRESS
6982 || modifier == EXPAND_INITIALIZER)
6984 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6985 || (bitpos % BITS_PER_UNIT != 0)))
6986 /* If the type and the field are a constant size and the
6987 size of the type isn't the same size as the bitfield,
6988 we must use bitfield operations. */
6990 && TYPE_SIZE (TREE_TYPE (exp))
6991 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6992 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6995 enum machine_mode ext_mode = mode;
6997 if (ext_mode == BLKmode
6998 && ! (target != 0 && MEM_P (op0)
7000 && bitpos % BITS_PER_UNIT == 0))
7001 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7003 if (ext_mode == BLKmode)
7006 target = assign_temp (type, 0, 1, 1);
7011 /* In this case, BITPOS must start at a byte boundary and
7012 TARGET, if specified, must be a MEM. */
7014 || (target != 0 && !MEM_P (target))
7015 || bitpos % BITS_PER_UNIT != 0)
7018 emit_block_move (target,
7019 adjust_address (op0, VOIDmode,
7020 bitpos / BITS_PER_UNIT),
7021 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7023 (modifier == EXPAND_STACK_PARM
7024 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7029 op0 = validize_mem (op0);
7031 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7032 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7034 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7035 (modifier == EXPAND_STACK_PARM
7036 ? NULL_RTX : target),
7037 ext_mode, ext_mode);
7039 /* If the result is a record type and BITSIZE is narrower than
7040 the mode of OP0, an integral mode, and this is a big-endian
7041 machine, we must put the field into the high-order bits. */
7042 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7043 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7044 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7045 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7046 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7050 /* If the result type is BLKmode, store the data into a temporary
7051 of the appropriate type, but with the mode corresponding to the
7052 mode for the data we have (op0's mode). It's tempting to make
7053 this a constant type, since we know it's only being stored once,
7054 but that can cause problems if we are taking the address of this
7055 COMPONENT_REF because the MEM of any reference via that address
7056 will have flags corresponding to the type, which will not
7057 necessarily be constant. */
7058 if (mode == BLKmode)
7061 = assign_stack_temp_for_type
7062 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7064 emit_move_insn (new, op0);
7065 op0 = copy_rtx (new);
7066 PUT_MODE (op0, BLKmode);
7067 set_mem_attributes (op0, exp, 1);
7073 /* If the result is BLKmode, use that to access the object
7075 if (mode == BLKmode)
7078 /* Get a reference to just this component. */
7079 if (modifier == EXPAND_CONST_ADDRESS
7080 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7081 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7083 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7085 if (op0 == orig_op0)
7086 op0 = copy_rtx (op0);
7088 set_mem_attributes (op0, exp, 0);
7089 if (REG_P (XEXP (op0, 0)))
7090 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7092 MEM_VOLATILE_P (op0) |= volatilep;
7093 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7094 || modifier == EXPAND_CONST_ADDRESS
7095 || modifier == EXPAND_INITIALIZER)
7097 else if (target == 0)
7098 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7100 convert_move (target, op0, unsignedp);
7105 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7108 /* Check for a built-in function. */
7109 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7110 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7112 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7114 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7115 == BUILT_IN_FRONTEND)
7116 return lang_hooks.expand_expr (exp, original_target,
7120 return expand_builtin (exp, target, subtarget, tmode, ignore);
7123 return expand_call (exp, target, ignore);
7125 case NON_LVALUE_EXPR:
7128 if (TREE_OPERAND (exp, 0) == error_mark_node)
7131 if (TREE_CODE (type) == UNION_TYPE)
7133 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7135 /* If both input and output are BLKmode, this conversion isn't doing
7136 anything except possibly changing memory attributes. */
7137 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7139 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7142 result = copy_rtx (result);
7143 set_mem_attributes (result, exp, 0);
7149 if (TYPE_MODE (type) != BLKmode)
7150 target = gen_reg_rtx (TYPE_MODE (type));
7152 target = assign_temp (type, 0, 1, 1);
7156 /* Store data into beginning of memory target. */
7157 store_expr (TREE_OPERAND (exp, 0),
7158 adjust_address (target, TYPE_MODE (valtype), 0),
7159 modifier == EXPAND_STACK_PARM ? 2 : 0);
7161 else if (REG_P (target))
7162 /* Store this field into a union of the proper type. */
7163 store_field (target,
7164 MIN ((int_size_in_bytes (TREE_TYPE
7165 (TREE_OPERAND (exp, 0)))
7167 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7168 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7169 VOIDmode, 0, type, 0);
7173 /* Return the entire union. */
7177 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7179 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7182 /* If the signedness of the conversion differs and OP0 is
7183 a promoted SUBREG, clear that indication since we now
7184 have to do the proper extension. */
7185 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7186 && GET_CODE (op0) == SUBREG)
7187 SUBREG_PROMOTED_VAR_P (op0) = 0;
7189 return REDUCE_BIT_FIELD (op0);
7192 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7193 op0 = REDUCE_BIT_FIELD (op0);
7194 if (GET_MODE (op0) == mode)
7197 /* If OP0 is a constant, just convert it into the proper mode. */
7198 if (CONSTANT_P (op0))
7200 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7201 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7203 if (modifier == EXPAND_INITIALIZER)
7204 return simplify_gen_subreg (mode, op0, inner_mode,
7205 subreg_lowpart_offset (mode,
7208 return convert_modes (mode, inner_mode, op0,
7209 TYPE_UNSIGNED (inner_type));
7212 if (modifier == EXPAND_INITIALIZER)
7213 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7217 convert_to_mode (mode, op0,
7218 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7220 convert_move (target, op0,
7221 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7224 case VIEW_CONVERT_EXPR:
7225 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7227 /* If the input and output modes are both the same, we are done.
7228 Otherwise, if neither mode is BLKmode and both are integral and within
7229 a word, we can use gen_lowpart. If neither is true, make sure the
7230 operand is in memory and convert the MEM to the new mode. */
7231 if (TYPE_MODE (type) == GET_MODE (op0))
7233 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7234 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7235 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7236 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7237 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7238 op0 = gen_lowpart (TYPE_MODE (type), op0);
7239 else if (!MEM_P (op0))
7241 /* If the operand is not a MEM, force it into memory. Since we
7242 are going to be changing the mode of the MEM, don't call
7243 force_const_mem for constants because we don't allow pool
7244 constants to change mode. */
7245 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7247 if (TREE_ADDRESSABLE (exp))
7250 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7252 = assign_stack_temp_for_type
7253 (TYPE_MODE (inner_type),
7254 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7256 emit_move_insn (target, op0);
7260 /* At this point, OP0 is in the correct mode. If the output type is such
7261 that the operand is known to be aligned, indicate that it is.
7262 Otherwise, we need only be concerned about alignment for non-BLKmode
7266 op0 = copy_rtx (op0);
7268 if (TYPE_ALIGN_OK (type))
7269 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7270 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7271 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7273 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7274 HOST_WIDE_INT temp_size
7275 = MAX (int_size_in_bytes (inner_type),
7276 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7277 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7278 temp_size, 0, type);
7279 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7281 if (TREE_ADDRESSABLE (exp))
7284 if (GET_MODE (op0) == BLKmode)
7285 emit_block_move (new_with_op0_mode, op0,
7286 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7287 (modifier == EXPAND_STACK_PARM
7288 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7290 emit_move_insn (new_with_op0_mode, op0);
7295 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7301 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7302 something else, make sure we add the register to the constant and
7303 then to the other thing. This case can occur during strength
7304 reduction and doing it this way will produce better code if the
7305 frame pointer or argument pointer is eliminated.
7307 fold-const.c will ensure that the constant is always in the inner
7308 PLUS_EXPR, so the only case we need to do anything about is if
7309 sp, ap, or fp is our second argument, in which case we must swap
7310 the innermost first argument and our second argument. */
7312 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7313 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7314 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7315 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7316 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7317 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7319 tree t = TREE_OPERAND (exp, 1);
7321 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7322 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7325 /* If the result is to be ptr_mode and we are adding an integer to
7326 something, we might be forming a constant. So try to use
7327 plus_constant. If it produces a sum and we can't accept it,
7328 use force_operand. This allows P = &ARR[const] to generate
7329 efficient code on machines where a SYMBOL_REF is not a valid
7332 If this is an EXPAND_SUM call, always return the sum. */
7333 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7334 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7336 if (modifier == EXPAND_STACK_PARM)
7338 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7339 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7340 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7344 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7346 /* Use immed_double_const to ensure that the constant is
7347 truncated according to the mode of OP1, then sign extended
7348 to a HOST_WIDE_INT. Using the constant directly can result
7349 in non-canonical RTL in a 64x32 cross compile. */
7351 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7353 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7354 op1 = plus_constant (op1, INTVAL (constant_part));
7355 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7356 op1 = force_operand (op1, target);
7357 return REDUCE_BIT_FIELD (op1);
7360 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7361 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7362 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7366 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7367 (modifier == EXPAND_INITIALIZER
7368 ? EXPAND_INITIALIZER : EXPAND_SUM));
7369 if (! CONSTANT_P (op0))
7371 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7372 VOIDmode, modifier);
7373 /* Return a PLUS if modifier says it's OK. */
7374 if (modifier == EXPAND_SUM
7375 || modifier == EXPAND_INITIALIZER)
7376 return simplify_gen_binary (PLUS, mode, op0, op1);
7379 /* Use immed_double_const to ensure that the constant is
7380 truncated according to the mode of OP1, then sign extended
7381 to a HOST_WIDE_INT. Using the constant directly can result
7382 in non-canonical RTL in a 64x32 cross compile. */
7384 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7386 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7387 op0 = plus_constant (op0, INTVAL (constant_part));
7388 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7389 op0 = force_operand (op0, target);
7390 return REDUCE_BIT_FIELD (op0);
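/* Illustrative example of the plus_constant path above: for
   P = &ARR[5] with 4-byte elements, the INTEGER_CST operand folds into
   the address, so the result can be the single expression

       (plus (symbol_ref "ARR") (const_int 20))

   rather than an explicit add instruction into a pseudo. */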
7394 /* No sense saving up arithmetic to be done
7395 if it's all in the wrong mode to form part of an address.
7396 And force_operand won't know whether to sign-extend or
7398 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7399 || mode != ptr_mode)
7401 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7402 subtarget, &op0, &op1, 0);
7403 if (op0 == const0_rtx)
7405 if (op1 == const0_rtx)
7410 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7411 subtarget, &op0, &op1, modifier);
7412 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7415 /* For initializers, we are allowed to return a MINUS of two
7416 symbolic constants. Here we handle all cases when both operands
7418 /* Handle difference of two symbolic constants,
7419 for the sake of an initializer. */
7420 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7421 && really_constant_p (TREE_OPERAND (exp, 0))
7422 && really_constant_p (TREE_OPERAND (exp, 1)))
7424 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7425 NULL_RTX, &op0, &op1, modifier);
7427 /* If the last operand is a CONST_INT, use plus_constant of
7428 the negated constant. Else make the MINUS. */
7429 if (GET_CODE (op1) == CONST_INT)
7430 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7432 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7435 /* No sense saving up arithmetic to be done
7436 if it's all in the wrong mode to form part of an address.
7437 And force_operand won't know whether to sign-extend or
7439 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7440 || mode != ptr_mode)
7443 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7444 subtarget, &op0, &op1, modifier);
7446 /* Convert A - const to A + (-const). */
7447 if (GET_CODE (op1) == CONST_INT)
7449 op1 = negate_rtx (mode, op1);
7450 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7456 /* If the first operand is constant, swap them.
7457 Thus the following special case checks need only
7458 check the second operand. */
7459 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7461 tree t1 = TREE_OPERAND (exp, 0);
7462 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7463 TREE_OPERAND (exp, 1) = t1;
7466 /* Attempt to return something suitable for generating an
7467 indexed address, for machines that support that. */
7469 if (modifier == EXPAND_SUM && mode == ptr_mode
7470 && host_integerp (TREE_OPERAND (exp, 1), 0))
7472 tree exp1 = TREE_OPERAND (exp, 1);
7474 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7478 op0 = force_operand (op0, NULL_RTX);
7480 op0 = copy_to_mode_reg (mode, op0);
7482 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7483 gen_int_mode (tree_low_cst (exp1, 0),
7484 TYPE_MODE (TREE_TYPE (exp1)))));
7487 if (modifier == EXPAND_STACK_PARM)
7490 /* Check for multiplying things that have been extended
7491 from a narrower type. If this machine supports multiplying
7492 in that narrower type with a result in the desired type,
7493 do it that way, and avoid the explicit type-conversion. */
7494 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7495 && TREE_CODE (type) == INTEGER_TYPE
7496 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7497 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7498 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7499 && int_fits_type_p (TREE_OPERAND (exp, 1),
7500 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7501 /* Don't use a widening multiply if a shift will do. */
7502 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7503 > HOST_BITS_PER_WIDE_INT)
7504 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7506 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7507 && (TYPE_PRECISION (TREE_TYPE
7508 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7509 == TYPE_PRECISION (TREE_TYPE
7511 (TREE_OPERAND (exp, 0), 0))))
7512 /* If both operands are extended, they must either both
7513 be zero-extended or both be sign-extended. */
7514 && (TYPE_UNSIGNED (TREE_TYPE
7515 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7516 == TYPE_UNSIGNED (TREE_TYPE
7518 (TREE_OPERAND (exp, 0), 0)))))))
7520 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7521 enum machine_mode innermode = TYPE_MODE (op0type);
7522 bool zextend_p = TYPE_UNSIGNED (op0type);
7523 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7524 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7526 if (mode == GET_MODE_WIDER_MODE (innermode))
7528 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7530 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7531 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7532 TREE_OPERAND (exp, 1),
7533 NULL_RTX, &op0, &op1, 0);
7535 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7536 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7537 NULL_RTX, &op0, &op1, 0);
7540 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7541 && innermode == word_mode)
7544 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7545 NULL_RTX, VOIDmode, 0);
7546 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7547 op1 = convert_modes (innermode, mode,
7548 expand_expr (TREE_OPERAND (exp, 1),
7549 NULL_RTX, VOIDmode, 0),
7552 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7553 NULL_RTX, VOIDmode, 0);
7554 temp = expand_binop (mode, other_optab, op0, op1, target,
7555 unsignedp, OPTAB_LIB_WIDEN);
7556 hipart = gen_highpart (innermode, temp);
7557 htem = expand_mult_highpart_adjust (innermode, hipart,
7561 emit_move_insn (hipart, htem);
7562 return REDUCE_BIT_FIELD (temp);
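/* Illustrative example of the widening-multiply path above: on a
   target with a 32x32->64 multiply pattern, the source expression
   (long long) a * (long long) b with 32-bit a and b is expanded with
   [us]mul_widen_optab directly, avoiding two explicit extensions
   followed by a full 64x64 multiply. */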
7566 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7567 subtarget, &op0, &op1, 0);
7568 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7570 case TRUNC_DIV_EXPR:
7571 case FLOOR_DIV_EXPR:
7573 case ROUND_DIV_EXPR:
7574 case EXACT_DIV_EXPR:
7575 if (modifier == EXPAND_STACK_PARM)
7577 /* Possible optimization: compute the dividend with EXPAND_SUM;
7578 then, if the divisor is constant, we can optimize the case
7579 where some terms of the dividend have coefficients divisible by it. */
7580 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7581 subtarget, &op0, &op1, 0);
7582 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7585 /* Emit a/b as a*(1/b). Later, CSE may manage to share the reciprocal, saving an
7586 expensive divide. If not, combine will rebuild the original
7588 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7589 && TREE_CODE (type) == REAL_TYPE
7590 && !real_onep (TREE_OPERAND (exp, 0)))
7591 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7592 build2 (RDIV_EXPR, type,
7593 build_real (type, dconst1),
7594 TREE_OPERAND (exp, 1))),
7595 target, tmode, modifier);
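/* Illustrative effect of the rewrite above (only valid under
   -funsafe-math-optimizations): a/y + b/y becomes
   a*(1/y) + b*(1/y), so CSE can compute the reciprocal once and
   replace two divisions with one division and two multiplies. */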
7599 case TRUNC_MOD_EXPR:
7600 case FLOOR_MOD_EXPR:
7602 case ROUND_MOD_EXPR:
7603 if (modifier == EXPAND_STACK_PARM)
7605 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7606 subtarget, &op0, &op1, 0);
7607 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7609 case FIX_ROUND_EXPR:
7610 case FIX_FLOOR_EXPR:
7612 abort (); /* Not used for C. */
7614 case FIX_TRUNC_EXPR:
7615 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7616 if (target == 0 || modifier == EXPAND_STACK_PARM)
7617 target = gen_reg_rtx (mode);
7618 expand_fix (target, op0, unsignedp);
7622 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7623 if (target == 0 || modifier == EXPAND_STACK_PARM)
7624 target = gen_reg_rtx (mode);
7625 /* expand_float can't figure out what to do if FROM has VOIDmode.
7626 So give it the correct mode. With -O, cse will optimize this. */
7627 if (GET_MODE (op0) == VOIDmode)
7628 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7630 expand_float (target, op0,
7631 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7635 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7636 if (modifier == EXPAND_STACK_PARM)
7638 temp = expand_unop (mode,
7639 optab_for_tree_code (NEGATE_EXPR, type),
7643 return REDUCE_BIT_FIELD (temp);
7646 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7647 if (modifier == EXPAND_STACK_PARM)
7650 /* ABS_EXPR is not valid for complex arguments. */
7651 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7652 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7655 /* Unsigned abs is simply the operand. Testing here means we don't
7656 risk generating incorrect code below. */
7657 if (TYPE_UNSIGNED (type))
7660 return expand_abs (mode, op0, target, unsignedp,
7661 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7665 target = original_target;
7667 || modifier == EXPAND_STACK_PARM
7668 || (MEM_P (target) && MEM_VOLATILE_P (target))
7669 || GET_MODE (target) != mode
7671 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7672 target = gen_reg_rtx (mode);
7673 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7674 target, &op0, &op1, 0);
7676 /* First try to do it with a special MIN or MAX instruction.
7677 If that does not win, use a conditional jump to select the proper
7679 this_optab = optab_for_tree_code (code, type);
7680 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7685 /* At this point, a MEM target is no longer useful; we will get better
7689 target = gen_reg_rtx (mode);
7691 /* If op1 was placed in target, swap op0 and op1. */
7692 if (target != op0 && target == op1)
7700 emit_move_insn (target, op0);
7702 op0 = gen_label_rtx ();
7704 /* If this mode is an integer too wide to compare properly,
7705 compare word by word. Rely on cse to optimize constant cases. */
7706 if (GET_MODE_CLASS (mode) == MODE_INT
7707 && ! can_compare_p (GE, mode, ccp_jump))
7709 if (code == MAX_EXPR)
7710 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7713 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7718 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7719 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7721 emit_move_insn (target, op1);
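/* Illustrative shape of the fallback sequence for MAX_EXPR when no
   direct max instruction exists (pseudo-code):

       target = op0;
       if (target >= op1) goto done;
       target = op1;
     done:

   with the comparison emitted by do_compare_rtx_and_jump above. */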
7726 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7727 if (modifier == EXPAND_STACK_PARM)
7729 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7734 /* ??? Can optimize bitwise operations with one arg constant.
7735 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7736 and (a bitwise1 b) bitwise2 b (etc)
7737 but that is probably not worthwhile. */
7739 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7740 boolean values when we want in all cases to compute both of them. In
7741 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7742 as actual zero-or-1 values and then bitwise anding. In cases where
7743 there cannot be any side effects, better code would be made by
7744 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7745 how to recognize those cases. */
7747 case TRUTH_AND_EXPR:
7748 code = BIT_AND_EXPR;
7753 code = BIT_IOR_EXPR;
7757 case TRUTH_XOR_EXPR:
7758 code = BIT_XOR_EXPR;
7766 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7768 if (modifier == EXPAND_STACK_PARM)
7770 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7771 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7774 /* Could determine the answer when only additive constants differ. Also,
7775 the addition of one can be handled by changing the condition. */
7782 case UNORDERED_EXPR:
7790 temp = do_store_flag (exp,
7791 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7792 tmode != VOIDmode ? tmode : mode, 0);
7796 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7797 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7799 && REG_P (original_target)
7800 && (GET_MODE (original_target)
7801 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7803 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7806 /* If temp is constant, we can just compute the result. */
7807 if (GET_CODE (temp) == CONST_INT)
7809 if (INTVAL (temp) != 0)
7810 emit_move_insn (target, const1_rtx);
7812 emit_move_insn (target, const0_rtx);
7817 if (temp != original_target)
7819 enum machine_mode mode1 = GET_MODE (temp);
7820 if (mode1 == VOIDmode)
7821 mode1 = tmode != VOIDmode ? tmode : mode;
7823 temp = copy_to_mode_reg (mode1, temp);
7826 op1 = gen_label_rtx ();
7827 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7828 GET_MODE (temp), unsignedp, op1);
7829 emit_move_insn (temp, const1_rtx);
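/* Illustrative shape of the sequence built above for foo != 0
   (pseudo-code):

       temp = foo;
       if (temp == 0) goto done;
       temp = 1;
     done:

   so no separate set-flag instruction is required. */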
7834 /* If no set-flag instruction, must generate a conditional store
7835 into a temporary variable. Drop through and handle this
7840 || modifier == EXPAND_STACK_PARM
7841 || ! safe_from_p (target, exp, 1)
7842 /* Make sure we don't have a hard reg (such as function's return
7843 value) live across basic blocks, if not optimizing. */
7844 || (!optimize && REG_P (target)
7845 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7846 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7849 emit_move_insn (target, const0_rtx);
7851 op1 = gen_label_rtx ();
7852 jumpifnot (exp, op1);
7855 emit_move_insn (target, const1_rtx);
7858 return ignore ? const0_rtx : target;
7860 case TRUTH_NOT_EXPR:
7861 if (modifier == EXPAND_STACK_PARM)
7863 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7864 /* The parser is careful to generate TRUTH_NOT_EXPR
7865 only with operands that are always zero or one. */
7866 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7867 target, 1, OPTAB_LIB_WIDEN);
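/* Illustrative identity used above: for an operand known to be 0 or 1,
   !x is computed as x ^ 1, avoiding a compare and branch. */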
7872 case STATEMENT_LIST:
7874 tree_stmt_iterator iter;
7879 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7880 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7885 /* If it's void, we don't need to worry about computing a value. */
7886 if (VOID_TYPE_P (TREE_TYPE (exp)))
7888 tree pred = TREE_OPERAND (exp, 0);
7889 tree then_ = TREE_OPERAND (exp, 1);
7890 tree else_ = TREE_OPERAND (exp, 2);
7892 if (TREE_CODE (then_) != GOTO_EXPR
7893 || TREE_CODE (GOTO_DESTINATION (then_)) != LABEL_DECL
7894 || TREE_CODE (else_) != GOTO_EXPR
7895 || TREE_CODE (GOTO_DESTINATION (else_)) != LABEL_DECL)
7898 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7899 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7902 /* Note that COND_EXPRs whose type is a structure or union
7903 are required to be constructed to contain assignments of
7904 a temporary variable, so that we can evaluate them here
7905 for side effect only. If type is void, we must do likewise. */
7907 if (TREE_ADDRESSABLE (type)
7909 || TREE_TYPE (TREE_OPERAND (exp, 1)) == void_type_node
7910 || TREE_TYPE (TREE_OPERAND (exp, 2)) == void_type_node)
7913 /* If we are not to produce a result, we have no target. Otherwise,
7914 if a target was specified use it; it will not be used as an
7915 intermediate target unless it is safe. If no target, use a
7918 if (modifier != EXPAND_STACK_PARM
7920 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7921 && GET_MODE (original_target) == mode
7922 #ifdef HAVE_conditional_move
7923 && (! can_conditionally_move_p (mode)
7924 || REG_P (original_target))
7926 && !MEM_P (original_target))
7927 temp = original_target;
7929 temp = assign_temp (type, 0, 0, 1);
7931 do_pending_stack_adjust ();
7933 op0 = gen_label_rtx ();
7934 op1 = gen_label_rtx ();
7935 jumpifnot (TREE_OPERAND (exp, 0), op0);
7936 store_expr (TREE_OPERAND (exp, 1), temp,
7937 modifier == EXPAND_STACK_PARM ? 2 : 0);
7939 emit_jump_insn (gen_jump (op1));
7942 store_expr (TREE_OPERAND (exp, 2), temp,
7943 modifier == EXPAND_STACK_PARM ? 2 : 0);
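/* Illustrative shape of the branching expansion above (pseudo-code):

       if (!pred) goto false_label;
       temp = then_;  goto join;
     false_label:
       temp = else_;
     join:
       ...temp holds the COND_EXPR value...  */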
7951 /* If lhs is complex, expand calls in rhs before computing it.
7952 That's so we don't compute a pointer and save it over a
7953 call. If lhs is simple, compute it first so we can give it
7954 as a target if the rhs is just a call. This avoids an
7955 extra temp and copy and that prevents a partial-subsumption
7956 which makes bad code. Actually we could treat
7957 component_ref's of vars like vars. */
7959 tree lhs = TREE_OPERAND (exp, 0);
7960 tree rhs = TREE_OPERAND (exp, 1);
7964 /* Check for |= or &= of a bitfield of size one into another bitfield
7965 of size 1. In this case, (unless we need the result of the
7966 assignment) we can do this more efficiently with a
7967 test followed by an assignment, if necessary.
7969 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7970 things change so we do, this code should be enhanced to
7973 && TREE_CODE (lhs) == COMPONENT_REF
7974 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7975 || TREE_CODE (rhs) == BIT_AND_EXPR)
7976 && TREE_OPERAND (rhs, 0) == lhs
7977 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7978 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7979 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7981 rtx label = gen_label_rtx ();
7983 do_jump (TREE_OPERAND (rhs, 1),
7984 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7985 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7986 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7987 (TREE_CODE (rhs) == BIT_IOR_EXPR
7989 : integer_zero_node)),
7991 do_pending_stack_adjust ();
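/* Illustrative example of the transformation above, for one-bit
   fields:

       lhs.b |= rhs.b;   becomes   if (rhs.b) lhs.b = 1;
       lhs.b &= rhs.b;   becomes   if (!rhs.b) lhs.b = 0;

   avoiding a read-modify-write of the destination bit-field. */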
7996 temp = expand_assignment (lhs, rhs, ! ignore);
8002 if (!TREE_OPERAND (exp, 0))
8003 expand_null_return ();
8005 expand_return (TREE_OPERAND (exp, 0));
8009 if (modifier == EXPAND_STACK_PARM)
8011 /* If we are taking the address of something erroneous, just
8013 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8015 /* If we are taking the address of a constant and are at the
8016 top level, we have to use output_constant_def since we can't
8017 call force_const_mem at top level. */
8019 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8020 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8022 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8025 /* We make sure to pass const0_rtx down if we came in with
8026 ignore set, to avoid doing the cleanups twice for the same expression. */
8027 op0 = expand_expr (TREE_OPERAND (exp, 0),
8028 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8029 (modifier == EXPAND_INITIALIZER
8030 ? modifier : EXPAND_CONST_ADDRESS));
8032 /* If we are going to ignore the result, OP0 will have been set
8033 to const0_rtx, so just return it. Don't get confused and
8034 think we are taking the address of the constant. */
8038 /* We would like the object in memory. If it is a constant, we can
8039 have it be statically allocated into memory. For a non-constant,
8040 we need to allocate some memory and store the value into it. */
8042 if (CONSTANT_P (op0))
8043 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8045 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8046 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8047 || GET_CODE (op0) == LO_SUM)
8049 /* If this object is in a register, it can't be BLKmode. */
8050 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8051 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8053 if (GET_CODE (op0) == PARALLEL)
8054 /* Handle calls that pass values in multiple
8055 non-contiguous locations. The Irix 6 ABI has examples
8057 emit_group_store (memloc, op0, inner_type,
8058 int_size_in_bytes (inner_type));
8060 emit_move_insn (memloc, op0);
8068 mark_temp_addr_taken (op0);
8069 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8071 op0 = XEXP (op0, 0);
8072 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8073 op0 = convert_memory_address (ptr_mode, op0);
8077 /* If OP0 is not aligned at least as much as the type requires, we
8078 need to make a temporary, copy OP0 to it, and take the address of
8079 the temporary. We want to use the alignment of the type, not of
8080 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8081 the test for BLKmode means that it can't happen. The test for
8082 BLKmode is because we never make mis-aligned MEMs with
8085 We don't need to do this at all if the machine doesn't have
8086 strict alignment. */
8087 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8088 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8090 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8092 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8095 if (TYPE_ALIGN_OK (inner_type))
8098 if (TREE_ADDRESSABLE (inner_type))
8100 /* We can't make a bitwise copy of this object, so fail. */
8101 error ("cannot take the address of an unaligned member");
8105 new = assign_stack_temp_for_type
8106 (TYPE_MODE (inner_type),
8107 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8108 : int_size_in_bytes (inner_type),
8109 1, build_qualified_type (inner_type,
8110 (TYPE_QUALS (inner_type)
8111 | TYPE_QUAL_CONST)));
8113 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8114 (modifier == EXPAND_STACK_PARM
8115 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8120 op0 = force_operand (XEXP (op0, 0), target);
8125 && modifier != EXPAND_CONST_ADDRESS
8126 && modifier != EXPAND_INITIALIZER
8127 && modifier != EXPAND_SUM)
8128 op0 = force_reg (Pmode, op0);
8131 && ! REG_USERVAR_P (op0))
8132 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8134 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8135 op0 = convert_memory_address (ptr_mode, op0);
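
      /* Editorial illustration (not part of the original source): the
	 strict-alignment fallback in this ADDR_EXPR case matters for
	 code like

	      struct s { char c; int i; } __attribute__ ((packed)) x;
	      const int *p = &x.i;

	 On a strict-alignment target, `x.i' may sit at an odd address,
	 so handing out its raw address would let a later word-sized
	 load trap; copying the field to an aligned temporary and
	 returning the temporary's address is the safe fallback.  */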

      /* COMPLEX type for Extended Pascal & Fortran.  */
    case COMPLEX_EXPR:
      {
	enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	rtx insns;

	/* Get the rtx code of the operands.  */
	op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	if (! target)
	  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	start_sequence ();

	/* Move the real (op0) and imaginary (op1) parts to their location.  */
	emit_move_insn (gen_realpart (mode, target), op0);
	emit_move_insn (gen_imagpart (mode, target), op1);

	insns = get_insns ();
	end_sequence ();

	/* Complex construction should appear as a single unit.  */
	/* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	   each with a separate pseudo as destination.
	   It's not correct for flow to treat them as a unit.  */
	if (GET_CODE (target) != CONCAT)
	  emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	else
	  emit_insn (insns);

	return target;
      }

    case REALPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_realpart (mode, op0);

    case IMAGPART_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
      return gen_imagpart (mode, op0);

    case RESX_EXPR:
      expand_resx_expr (exp);
      return const0_rtx;

    case TRY_CATCH_EXPR:
    case CATCH_EXPR:
    case EH_FILTER_EXPR:
    case TRY_FINALLY_EXPR:
      /* Lowered by tree-eh.c.  */
      abort ();

    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TARGET_EXPR:
    case CASE_LABEL_EXPR:
    case VA_ARG_EXPR:
    case BIND_EXPR:
    case INIT_EXPR:
    case CONJ_EXPR:
    case COMPOUND_EXPR:
    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
    case LOOP_EXPR:
    case EXIT_EXPR:
    case LABELED_BLOCK_EXPR:
    case EXIT_BLOCK_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Lowered by gimplify.c.  */
      abort ();

    case EXC_PTR_EXPR:
      return get_exception_pointer (cfun);

    case FILTER_EXPR:
      return get_exception_filter (cfun);

    case FDESC_EXPR:
      /* Function descriptors are not valid except for as
	 initialization constants, and should not be expanded.  */
      abort ();

    case SWITCH_EXPR:
      expand_case (exp);
      return const0_rtx;

    case LABEL_EXPR:
      expand_label (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case ASM_EXPR:
      expand_asm_expr (exp);
      return const0_rtx;

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR expands to its first argument.  The caller should
	 have pulled out the size to use in whatever context it needed.  */
      return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
			       modifier, alt_rtl);

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }

  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  if (temp == 0)
    abort ();
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD

/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */

static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
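
/* Editorial illustration (not part of the original source): for a
   signed 5-bit field held in SImode, the shift pair above computes

	(x << 27) >> 27

   with an arithmetic right shift, which both truncates to 5 bits and
   sign-extends the result; the unsigned path instead masks with
   (1 << 5) - 1 = 0x1f.  */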

/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
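
/* Editorial illustration (not part of the original source): the pattern
   recognized above is the usual idiom for rounding an address up to an
   alignment boundary, e.g.

	(char *) &exp + ((- (intptr_t) &exp) & (64 - 1))

   where the BIT_AND_EXPR constant 63 is one less than a power of 2
   larger than BIGGEST_ALIGNMENT / BITS_PER_UNIT, so the sum is known
   to be 64-byte aligned.  */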

/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
      return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
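
/* Editorial illustration (not part of the original source): for a call
   such as strlen ("hello" + 2), string_constant returns the STRING_CST
   for "hello" and sets *PTR_OFFSET to 2, letting builtin folders see
   through the pointer arithmetic to the constant tail "llo".  */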

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
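
/* Editorial illustration (not part of the original source): on a target
   without a usable scc instruction, the set/jump/set fallback mentioned
   above expands "r = (a < b)" into roughly

	r = 1;
	if (a < b) goto L;
	r = 0;
      L:

   (with the two constants swapped when the result must be inverted).  */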

static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
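
  /* Editorial illustration (not part of the original source): for
     "(x & 8) != 0", fold_single_bit_test produces the equivalent of
     "(x >> 3) & 1", and for the EQ form the result is xored with 1;
     no store-flag (scc) instruction is needed either way.  */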

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
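
/* Editorial sketch (not part of the original source): a casesi
   expansion implements the classic jump-table dispatch for a switch
   statement as, in pseudo-C,

	unsigned t = i - lo;             (op1 above is the low bound)
	if (t > (unsigned) (hi - lo))    (op2 above is the range)
	  goto default_label;
	goto *table_label[t];

   with the bounds check and indexed jump folded into the single
   casesi pattern when the target provides one.  */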

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
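
  /* Editorial illustration (not part of the original source): for a
     table covering cases 5 through 10, INDEX arrives as i - 5 and is
     compared unsigned against RANGE = 5.  Any original value below 5
     wraps around to a huge unsigned number, so the single GTU test
     rejects both i < 5 and i > 10 at once.  */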

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
			     convert (index_type, index_expr),
			     convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"