/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
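
/* Illustrative only (not from the original source): on a target where
   the stack grows downward, STACK_PUSH_CODE is PRE_DEC, so pushing a
   word-mode register REG amounts to a store through a pre-decrement
   address, roughly

     emit_move_insn (gen_rtx_MEM (word_mode,
                                  gen_rtx_PRE_DEC (Pmode,
                                                   stack_pointer_rtx)),
                     reg);

   On an upward-growing stack the same push uses PRE_INC instead.  */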
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
static int volatile_ok;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx *, rtx *,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
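
/* Illustrative only (not from the original source): a target that wants
   to cap inline expansion at 16 bytes regardless of the insn-count
   heuristic could override the default in its target header, e.g.

     #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
       ((SIZE) <= 16 && (ALIGN) >= BITS_PER_WORD)

   The defaults above instead compare the number of move insns needed,
   as computed by move_by_pieces_ninsns, against MOVE_RATIO or
   CLEAR_RATIO.  */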
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
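
/* Illustrative only (not from the original source): a target where
   unaligned word accesses trap or are emulated slowly, but unaligned
   halfword accesses are cheap, might define

     #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) \
       ((ALIGN) < GET_MODE_ALIGNMENT (MODE) && GET_MODE_SIZE (MODE) > 2)

   By default the macro simply follows STRICT_ALIGNMENT.  */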
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
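
/* Illustrative only (not from the original source): on a target with an
   extendsfdf2 pattern whose memory operand predicate accepts a MEM, the
   loop above records

     float_extend_from_mem[DFmode][SFmode] = true;

   which later lets an SFmode value in memory be loaded and extended to
   DFmode in one insn instead of a load followed by a separate extend.  */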
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
                  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
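
/* Illustrative only (not from the original source): a caller with a
   QImode value in REG that needs it sign-extended into an SImode pseudo
   would write something like

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, reg, 0);

   where the final 0 requests sign-extension (nonzero would request
   zero-extension).  convert_move then picks a direct extend insn, an
   intermediate mode, or an explicit shift pair, whichever the target
   supports.  */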
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
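
/* Illustrative only (not from the original source): unlike convert_move,
   which stores into an existing target, convert_to_mode returns an rtx,
   possibly a fresh pseudo.  For example, widening a HImode value HI to
   unsigned SImode is just

     rtx si = convert_to_mode (SImode, hi, 1);

   and SI may be HI itself referred to in place when no real conversion
   is needed.  */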
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
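
/* Illustrative only (not from the original source): on a 64-bit host
   (HOST_WIDE_INT of 8 bytes) targeting a machine with MOVE_MAX_PIECES
   of 16, this works out to

     STORE_MAX_PIECES = MIN (16, 2 * 8) = 16

   so store_by_pieces may emit stores of up to 16 bytes at a time.  */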
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
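
/* Illustrative only (not from the original source): callers typically
   probe with can_move_by_pieces before committing to the inline
   expansion, e.g.

     if (can_move_by_pieces (len, align))
       move_by_pieces (dst, src, len, align, 0);
     else
       emit_block_move (dst, src, GEN_INT (len), BLOCK_OP_NORMAL);

   where dst and src are BLKmode MEMs and len is a compile-time byte
   count.  */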
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);
  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
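
/* Illustrative only (not from the original source): for l = 11 fully
   aligned bytes on a target whose widest piece is DImode, the loop
   counts

     11 / 8 = 1 DImode insn (3 bytes left over),
      3 / 4 = 0 SImode insns,
      3 / 2 = 1 HImode insn (1 byte left over),
      1 / 1 = 1 QImode insn,

   for a total of 3, which MOVE_BY_PIECES_P then compares against
   MOVE_RATIO.  */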
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
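
/* Illustrative only (not from the original source): expanding an
   aggregate assignment of a 64-byte struct might boil down to

     emit_block_move (dst_mem, src_mem, GEN_INT (64), BLOCK_OP_NORMAL);

   which tries move_by_pieces first, then a movmem pattern, and finally
   falls back to calling memcpy.  */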
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
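
/* Illustrative only (not from the original source): the RTL emitted
   above is equivalent to this C loop, with the leading jump ensuring
   that a zero SIZE copies nothing:

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter++;
   cmp:
     if (iter < size)
       goto top;
*/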
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
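
/* Illustrative only (not from the original source): a group rtx for a
   value passed in two registers, say DImode at byte offset 0 and SImode
   at byte offset 8, looks like

     (parallel [(expr_list (reg:DI d0) (const_int 0))
                (expr_list (reg:SI d1) (const_int 8))])

   where d0 and d1 are hypothetical register names.  gen_group_rtx
   clones that shape with fresh pseudos in place of d0 and d1, keeping
   the modes and byte offsets.  */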
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src) && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load (dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
2010 /* Determine whether the LEN bytes generated by CONSTFUN can be
2011 stored to memory using several move instructions. CONSTFUNDATA is
2012 a pointer which will be passed as argument in every CONSTFUN call.
2013 ALIGN is maximum alignment we can assume. Return nonzero if a
2014 call to store_by_pieces should succeed. */
2017 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2018 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2019 void *constfundata, unsigned int align)
2021 unsigned HOST_WIDE_INT l;
2022 unsigned int max_size;
2023 HOST_WIDE_INT offset = 0;
2024 enum machine_mode mode, tmode;
2025 enum insn_code icode;
2032 if (! STORE_BY_PIECES_P (len, align))
2035 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2036 if (align >= GET_MODE_ALIGNMENT (tmode))
2037 align = GET_MODE_ALIGNMENT (tmode);
2040 enum machine_mode xmode;
2042 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2044 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2045 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2046 || SLOW_UNALIGNED_ACCESS (tmode, align))
2049 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2052 /* We would first store what we can in the largest integer mode, then go to
2053 successively smaller modes. */
2056 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2061 max_size = STORE_MAX_PIECES + 1;
2062 while (max_size > 1)
2064 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2065 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2066 if (GET_MODE_SIZE (tmode) < max_size)
2069 if (mode == VOIDmode)
2070 break;
2072 icode = mov_optab->handlers[(int) mode].insn_code;
2073 if (icode != CODE_FOR_nothing
2074 && align >= GET_MODE_ALIGNMENT (mode))
2076 unsigned int size = GET_MODE_SIZE (mode);
2083 cst = (*constfun) (constfundata, offset, mode);
2084 if (!LEGITIMATE_CONSTANT_P (cst))
2085 return 0;
2094 max_size = GET_MODE_SIZE (mode);
2097 /* The code above should have handled everything. */
2098 gcc_assert (!l);
2101 return 1;
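
/* Illustrative sketch, not part of GCC; the "example_" name is
   hypothetical.  A CONSTFUN that yields an all-ones constant for any
   offset; constm1_rtx is valid in every integer mode, so it can be
   used to probe with can_store_by_pieces before committing:

     if (can_store_by_pieces (len, example_ones_constfun, NULL, align))
       store_by_pieces (to, len, example_ones_constfun, NULL, align, 0);  */
static rtx
example_ones_constfun (void *data ATTRIBUTE_UNUSED,
                       HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                       enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return constm1_rtx;
}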
2104 /* Generate several move instructions to store LEN bytes generated by
2105 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2106 pointer which will be passed as argument in every CONSTFUN call.
2107 ALIGN is maximum alignment we can assume.
2108 If ENDP is 0 return TO; if ENDP is 1 return memory at the end,
2109 as for mempcpy; and if ENDP is 2 return memory at the end minus
2110 one byte, as for stpcpy. */
2113 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2114 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2115 void *constfundata, unsigned int align, int endp)
2117 struct store_by_pieces data;
2121 gcc_assert (endp != 2);
2122 return to;
2125 gcc_assert (STORE_BY_PIECES_P (len, align));
2126 data.constfun = constfun;
2127 data.constfundata = constfundata;
2128 data.len = len;
2129 data.to = to;
2130 store_by_pieces_1 (&data, align);
2135 gcc_assert (!data.reverse);
2140 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2141 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2142 else
2143 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2144 -1));
2146 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2147 data.offset);
2153 to1 = adjust_address (data.to, QImode, data.offset);
2161 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2162 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2165 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2167 struct store_by_pieces data;
2169 if (len == 0)
2170 return;
2172 data.constfun = clear_by_pieces_1;
2173 data.constfundata = NULL;
2174 data.len = len;
2175 data.to = to;
2176 store_by_pieces_1 (&data, align);
2179 /* Callback routine for clear_by_pieces.
2180 Return const0_rtx unconditionally. */
2183 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2184 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2185 enum machine_mode mode ATTRIBUTE_UNUSED)
2187 return const0_rtx;
2190 /* Subroutine of clear_by_pieces and store_by_pieces.
2191 Generate several move instructions to store LEN bytes of block TO. (A MEM
2192 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2195 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2196 unsigned int align ATTRIBUTE_UNUSED)
2198 rtx to_addr = XEXP (data->to, 0);
2199 unsigned int max_size = STORE_MAX_PIECES + 1;
2200 enum machine_mode mode = VOIDmode, tmode;
2201 enum insn_code icode;
2204 data->to_addr = to_addr;
2205 data->autinc_to
2206 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2207 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2209 data->explicit_inc_to = 0;
2210 data->reverse
2211 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2212 if (data->reverse)
2213 data->offset = data->len;
2215 /* If storing requires more than two move insns,
2216 copy addresses to registers (to make displacements shorter)
2217 and use post-increment if available. */
2218 if (!data->autinc_to
2219 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2221 /* Determine the main mode we'll be using. */
2222 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2223 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2224 if (GET_MODE_SIZE (tmode) < max_size)
2225 mode = tmode;
2227 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2229 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2230 data->autinc_to = 1;
2231 data->explicit_inc_to = -1;
2234 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2235 && ! data->autinc_to)
2237 data->to_addr = copy_addr_to_reg (to_addr);
2238 data->autinc_to = 1;
2239 data->explicit_inc_to = 1;
2242 if (!data->autinc_to && CONSTANT_P (to_addr))
2243 data->to_addr = copy_addr_to_reg (to_addr);
2246 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2247 if (align >= GET_MODE_ALIGNMENT (tmode))
2248 align = GET_MODE_ALIGNMENT (tmode);
2251 enum machine_mode xmode;
2253 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2254 tmode != VOIDmode;
2255 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2256 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2257 || SLOW_UNALIGNED_ACCESS (tmode, align))
2258 break;
2260 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2263 /* First store what we can in the largest integer mode, then go to
2264 successively smaller modes. */
2266 while (max_size > 1)
2268 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2269 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2270 if (GET_MODE_SIZE (tmode) < max_size)
2271 mode = tmode;
2273 if (mode == VOIDmode)
2274 break;
2276 icode = mov_optab->handlers[(int) mode].insn_code;
2277 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2278 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2280 max_size = GET_MODE_SIZE (mode);
2283 /* The code above should have handled everything. */
2284 gcc_assert (!data->len);
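
/* Illustrative sketch, hypothetical name: the mode-walking idiom used
   above, which returns the widest integer mode whose size is strictly
   less than MAX_SIZE bytes (VOIDmode if none qualifies).  */
static enum machine_mode
example_widest_int_mode (unsigned int max_size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < max_size)
      mode = tmode;
  return mode;
}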
2287 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2288 with move instructions for mode MODE. GENFUN is the gen_... function
2289 to make a move insn for that mode. DATA has all the other info. */
2292 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2293 struct store_by_pieces *data)
2295 unsigned int size = GET_MODE_SIZE (mode);
2298 while (data->len >= size)
2300 if (data->reverse)
2301 data->offset -= size;
2303 if (data->autinc_to)
2304 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2305 data->offset);
2306 else
2307 to1 = adjust_address (data->to, mode, data->offset);
2309 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2310 emit_insn (gen_add2_insn (data->to_addr,
2311 GEN_INT (-(HOST_WIDE_INT) size)));
2313 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2314 emit_insn ((*genfun) (to1, cst));
2316 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2317 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2319 if (! data->reverse)
2320 data->offset += size;
2322 data->len -= size;
2326 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2327 its length in bytes. */
2330 clear_storage (rtx object, rtx size)
2333 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2334 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2336 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2337 just move a zero. Otherwise, do this a piece at a time. */
2338 if (GET_MODE (object) != BLKmode
2339 && GET_CODE (size) == CONST_INT
2340 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2341 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2344 if (size == const0_rtx)
2346 else if (GET_CODE (size) == CONST_INT
2347 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2348 clear_by_pieces (object, INTVAL (size), align);
2349 else if (clear_storage_via_clrmem (object, size, align))
2352 retval = clear_storage_via_libcall (object, size);
2354 return retval;
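
/* Illustrative sketch, hypothetical: zeroing a 64-byte BLKmode
   buffer.  clear_storage chooses among a single move of zero,
   clear_by_pieces, a clrmem pattern, and a memset libcall.  */
static void
example_zero_block (rtx blk_mem)
{
  clear_storage (blk_mem, GEN_INT (64));
}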
2358 /* A subroutine of clear_storage. Expand a clrmem pattern;
2359 return true if successful. */
2362 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2364 /* Try the most limited insn first, because there's no point
2365 including more than one in the machine description unless
2366 the more limited one has some advantage. */
2368 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2369 enum machine_mode mode;
2371 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2372 mode = GET_MODE_WIDER_MODE (mode))
2374 enum insn_code code = clrmem_optab[(int) mode];
2375 insn_operand_predicate_fn pred;
2377 if (code != CODE_FOR_nothing
2378 /* We don't need MODE to be narrower than
2379 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2380 the mode mask, as it is returned by the macro, it will
2381 definitely be less than the actual mode mask. */
2382 && ((GET_CODE (size) == CONST_INT
2383 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2384 <= (GET_MODE_MASK (mode) >> 1)))
2385 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2386 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2387 || (*pred) (object, BLKmode))
2388 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2389 || (*pred) (opalign, VOIDmode)))
2392 rtx last = get_last_insn ();
2395 op1 = convert_to_mode (mode, size, 1);
2396 pred = insn_data[(int) code].operand[1].predicate;
2397 if (pred != 0 && ! (*pred) (op1, mode))
2398 op1 = copy_to_mode_reg (mode, op1);
2400 pat = GEN_FCN ((int) code) (object, op1, opalign);
2401 if (pat)
2402 {
2403 emit_insn (pat);
2404 return true;
2405 }
2406 else
2407 delete_insns_since (last);
2414 /* A subroutine of clear_storage. Expand a call to memset.
2415 Return the return value of memset, 0 otherwise. */
2418 clear_storage_via_libcall (rtx object, rtx size)
2420 tree call_expr, arg_list, fn, object_tree, size_tree;
2421 enum machine_mode size_mode;
2424 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2425 wrap those pseudos in tree nodes and use them later. */
2427 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2429 size_mode = TYPE_MODE (sizetype);
2430 size = convert_to_mode (size_mode, size, 1);
2431 size = copy_to_mode_reg (size_mode, size);
2433 /* It is incorrect to use the libcall calling conventions to call
2434 memset in this context. This could be a user call to memset and
2435 the user may wish to examine the return value from memset. For
2436 targets where libcalls and normal calls have different conventions
2437 for returning pointers, we could end up generating incorrect code. */
2439 object_tree = make_tree (ptr_type_node, object);
2440 size_tree = make_tree (sizetype, size);
2442 fn = clear_storage_libcall_fn (true);
2443 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2444 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2445 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2447 /* Now we have to build up the CALL_EXPR itself. */
2448 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2449 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2450 call_expr, arg_list, NULL_TREE);
2452 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2457 /* A subroutine of clear_storage_via_libcall. Create the tree node
2458 for the function we use for block clears. The first time FOR_CALL
2459 is true, we call assemble_external. */
2461 static GTY(()) tree block_clear_fn;
2464 init_block_clear_fn (const char *asmspec)
2466 if (!block_clear_fn)
2470 fn = get_identifier ("memset");
2471 args = build_function_type_list (ptr_type_node, ptr_type_node,
2472 integer_type_node, sizetype,
2475 fn = build_decl (FUNCTION_DECL, fn, args);
2476 DECL_EXTERNAL (fn) = 1;
2477 TREE_PUBLIC (fn) = 1;
2478 DECL_ARTIFICIAL (fn) = 1;
2479 TREE_NOTHROW (fn) = 1;
2481 block_clear_fn = fn;
2484 if (asmspec)
2485 set_user_assembler_name (block_clear_fn, asmspec);
2489 clear_storage_libcall_fn (int for_call)
2491 static bool emitted_extern;
2493 if (!block_clear_fn)
2494 init_block_clear_fn (NULL);
2496 if (for_call && !emitted_extern)
2498 emitted_extern = true;
2499 make_decl_rtl (block_clear_fn);
2500 assemble_external (block_clear_fn);
2503 return block_clear_fn;
2506 /* Generate code to copy Y into X.
2507 Both Y and X must have the same mode, except that
2508 Y can be a constant with VOIDmode.
2509 This mode cannot be BLKmode; use emit_block_move for that.
2511 Return the last instruction emitted. */
2514 emit_move_insn (rtx x, rtx y)
2516 enum machine_mode mode = GET_MODE (x);
2517 rtx y_cst = NULL_RTX;
2520 gcc_assert (mode != BLKmode
2521 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
2526 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2527 && (last_insn = compress_float_constant (x, y)))
2532 if (!LEGITIMATE_CONSTANT_P (y))
2534 y = force_const_mem (mode, y);
2536 /* If the target's cannot_force_const_mem prevented the spill,
2537 assume that the target's move expanders will also take care
2538 of the non-legitimate constant. */
2539 if (!y)
2540 y = y_cst;
2544 /* If X or Y are memory references, verify that their addresses are valid
2545 for the machine. */
2546 if (MEM_P (x)
2547 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2548 && ! push_operand (x, GET_MODE (x)))
2549 || (flag_force_addr
2550 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2551 x = validize_mem (x);
2553 if (MEM_P (y)
2554 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2555 || (flag_force_addr
2556 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2557 y = validize_mem (y);
2559 gcc_assert (mode != BLKmode);
2561 last_insn = emit_move_insn_1 (x, y);
2563 if (y_cst && REG_P (x)
2564 && (set = single_set (last_insn)) != NULL_RTX
2565 && SET_DEST (set) == x
2566 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2567 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2569 return last_insn;
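
/* Illustrative sketch, hypothetical: loading a constant into a fresh
   pseudo.  When the constant must be spilled or rewritten, the
   REG_EQUAL note attached above still records the original value.  */
static rtx
example_load_constant (void)
{
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
  return reg;
}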
2572 /* Low level part of emit_move_insn.
2573 Called just like emit_move_insn, but assumes X and Y
2574 are basically valid. */
2577 emit_move_insn_1 (rtx x, rtx y)
2579 enum machine_mode mode = GET_MODE (x);
2580 enum machine_mode submode;
2581 enum mode_class class = GET_MODE_CLASS (mode);
2583 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2585 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2587 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2589 /* Expand complex moves by moving real part and imag part, if possible. */
2590 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2591 && BLKmode != (submode = GET_MODE_INNER (mode))
2592 && (mov_optab->handlers[(int) submode].insn_code
2593 != CODE_FOR_nothing))
2595 /* Don't split destination if it is a stack push. */
2596 int stack = push_operand (x, GET_MODE (x));
2598 #ifdef PUSH_ROUNDING
2599 /* In case we output to the stack, but the size is smaller than the
2600 machine can push exactly, we need to use move instructions. */
2601 if (stack
2602 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2603 != GET_MODE_SIZE (submode)))
2606 HOST_WIDE_INT offset1, offset2;
2608 /* Do not use anti_adjust_stack, since we don't want to update
2609 stack_pointer_delta. */
2610 temp = expand_binop (Pmode,
2611 #ifdef STACK_GROWS_DOWNWARD
2612 sub_optab,
2613 #else
2614 add_optab,
2615 #endif
2616 stack_pointer_rtx,
2617 GEN_INT
2618 (PUSH_ROUNDING
2619 (GET_MODE_SIZE (GET_MODE (x)))),
2620 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2622 if (temp != stack_pointer_rtx)
2623 emit_move_insn (stack_pointer_rtx, temp);
2625 #ifdef STACK_GROWS_DOWNWARD
2626 offset1 = 0;
2627 offset2 = GET_MODE_SIZE (submode);
2628 #else
2629 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2630 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2631 + GET_MODE_SIZE (submode));
2632 #endif
2634 emit_move_insn (change_address (x, submode,
2635 gen_rtx_PLUS (Pmode,
2636 XEXP (x, 0),
2637 GEN_INT (offset1))),
2638 gen_realpart (submode, y));
2639 emit_move_insn (change_address (x, submode,
2640 gen_rtx_PLUS (Pmode,
2641 XEXP (x, 0),
2642 GEN_INT (offset2))),
2643 gen_imagpart (submode, y));
2647 /* If this is a stack, push the highpart first, so it
2648 will be in the argument order.
2650 In that case, change_address is used only to convert
2651 the mode, not to change the address. */
2654 /* Note that the real part always precedes the imag part in memory
2655 regardless of machine's endianness. */
2656 #ifdef STACK_GROWS_DOWNWARD
2657 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2658 gen_imagpart (submode, y));
2659 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2660 gen_realpart (submode, y));
2661 #else
2662 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2663 gen_realpart (submode, y));
2664 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2665 gen_imagpart (submode, y));
2666 #endif
2670 rtx realpart_x, realpart_y;
2671 rtx imagpart_x, imagpart_y;
2673 /* If this is a complex value with each part being smaller than a
2674 word, the usual calling sequence will likely pack the pieces into
2675 a single register. Unfortunately, SUBREG of hard registers only
2676 deals in terms of words, so we have a problem converting input
2677 arguments to the CONCAT of two registers that is used elsewhere
2678 for complex values. If this is before reload, we can copy it into
2679 memory and reload. FIXME, we should see about using extract and
2680 insert on integer registers, but complex short and complex char
2681 variables should be rarely used. */
2682 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2683 && (reload_in_progress | reload_completed) == 0)
2685 int packed_dest_p
2686 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2687 int packed_src_p
2688 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2690 if (packed_dest_p || packed_src_p)
2692 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2693 ? MODE_FLOAT : MODE_INT);
2695 enum machine_mode reg_mode
2696 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2698 if (reg_mode != BLKmode)
2700 rtx mem = assign_stack_temp (reg_mode,
2701 GET_MODE_SIZE (mode), 0);
2702 rtx cmem = adjust_address (mem, mode, 0);
2704 if (packed_dest_p)
2705 {
2706 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2708 emit_move_insn_1 (cmem, y);
2709 return emit_move_insn_1 (sreg, mem);
2710 }
2711 else
2712 {
2713 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2715 emit_move_insn_1 (mem, sreg);
2716 return emit_move_insn_1 (x, cmem);
2722 realpart_x = gen_realpart (submode, x);
2723 realpart_y = gen_realpart (submode, y);
2724 imagpart_x = gen_imagpart (submode, x);
2725 imagpart_y = gen_imagpart (submode, y);
2727 /* Show the output dies here. This is necessary for SUBREGs
2728 of pseudos since we cannot track their lifetimes correctly;
2729 hard regs shouldn't appear here except as return values.
2730 We never want to emit such a clobber after reload. */
2731 if (x != y
2732 && ! (reload_in_progress || reload_completed)
2733 && (GET_CODE (realpart_x) == SUBREG
2734 || GET_CODE (imagpart_x) == SUBREG))
2735 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2737 emit_move_insn (realpart_x, realpart_y);
2738 emit_move_insn (imagpart_x, imagpart_y);
2741 return get_last_insn ();
2744 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2745 find a mode to do it in. If we have a movcc, use it. Otherwise,
2746 find the MODE_INT mode of the same width. */
2747 else if (GET_MODE_CLASS (mode) == MODE_CC
2748 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2750 enum insn_code insn_code;
2751 enum machine_mode tmode = VOIDmode;
2754 if (mode != CCmode
2755 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2756 tmode = CCmode;
2757 else
2758 for (tmode = QImode; tmode != VOIDmode;
2759 tmode = GET_MODE_WIDER_MODE (tmode))
2760 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2761 break;
2763 gcc_assert (tmode != VOIDmode);
2765 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2766 may call change_address which is not appropriate if we were
2767 called when a reload was in progress. We don't have to worry
2768 about changing the address since the size in bytes is supposed to
2769 be the same. Copy the MEM to change the mode and move any
2770 substitutions from the old MEM to the new one. */
2772 if (reload_in_progress)
2774 x = gen_lowpart_common (tmode, x1);
2775 if (x == 0 && MEM_P (x1))
2777 x = adjust_address_nv (x1, tmode, 0);
2778 copy_replacements (x1, x);
2781 y = gen_lowpart_common (tmode, y1);
2782 if (y == 0 && MEM_P (y1))
2784 y = adjust_address_nv (y1, tmode, 0);
2785 copy_replacements (y1, y);
2790 x = gen_lowpart (tmode, x);
2791 y = gen_lowpart (tmode, y);
2794 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2795 return emit_insn (GEN_FCN (insn_code) (x, y));
2798 /* Try using a move pattern for the corresponding integer mode. This is
2799 only safe when simplify_subreg can convert MODE constants into integer
2800 constants. At present, it can only do this reliably if the value
2801 fits within a HOST_WIDE_INT. */
2802 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2803 && (submode = int_mode_for_mode (mode)) != BLKmode
2804 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2805 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2806 (simplify_gen_subreg (submode, x, mode, 0),
2807 simplify_gen_subreg (submode, y, mode, 0)));
2809 /* This will handle any multi-word or full-word mode that lacks a move_insn
2810 pattern. However, you will get better code if you define such patterns,
2811 even if they must turn into multiple assembler instructions. */
2819 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2821 #ifdef PUSH_ROUNDING
2823 /* If X is a push on the stack, do the push now and replace
2824 X with a reference to the stack pointer. */
2825 if (push_operand (x, GET_MODE (x)))
2830 /* Do not use anti_adjust_stack, since we don't want to update
2831 stack_pointer_delta. */
2832 temp = expand_binop (Pmode,
2833 #ifdef STACK_GROWS_DOWNWARD
2834 sub_optab,
2835 #else
2836 add_optab,
2837 #endif
2838 stack_pointer_rtx,
2839 GEN_INT
2840 (PUSH_ROUNDING
2841 (GET_MODE_SIZE (GET_MODE (x)))),
2842 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2844 if (temp != stack_pointer_rtx)
2845 emit_move_insn (stack_pointer_rtx, temp);
2847 code = GET_CODE (XEXP (x, 0));
2849 /* Just hope that small offsets off SP are OK. */
2850 if (code == POST_INC)
2851 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2852 GEN_INT (-((HOST_WIDE_INT)
2853 GET_MODE_SIZE (GET_MODE (x)))));
2854 else if (code == POST_DEC)
2855 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2856 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2857 else
2858 temp = stack_pointer_rtx;
2860 x = change_address (x, VOIDmode, temp);
2864 /* If we are in reload, see if either operand is a MEM whose address
2865 is scheduled for replacement. */
2866 if (reload_in_progress && MEM_P (x)
2867 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2868 x = replace_equiv_address_nv (x, inner);
2869 if (reload_in_progress && MEM_P (y)
2870 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2871 y = replace_equiv_address_nv (y, inner);
2876 for (i = 0;
2877 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2878 i++)
2880 rtx xpart = operand_subword (x, i, 1, mode);
2881 rtx ypart = operand_subword (y, i, 1, mode);
2883 /* If we can't get a part of Y, put Y into memory if it is a
2884 constant. Otherwise, force it into a register. If we still
2885 can't get a part of Y, abort. */
2886 if (ypart == 0 && CONSTANT_P (y))
2888 y = force_const_mem (mode, y);
2889 ypart = operand_subword (y, i, 1, mode);
2891 else if (ypart == 0)
2892 ypart = operand_subword_force (y, i, mode);
2894 gcc_assert (xpart && ypart);
2896 need_clobber |= (GET_CODE (xpart) == SUBREG);
2898 last_insn = emit_move_insn (xpart, ypart);
2904 /* Show the output dies here. This is necessary for SUBREGs
2905 of pseudos since we cannot track their lifetimes correctly;
2906 hard regs shouldn't appear here except as return values.
2907 We never want to emit such a clobber after reload. */
2908 if (x != y
2909 && ! (reload_in_progress || reload_completed)
2910 && need_clobber != 0)
2911 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2919 /* If Y is representable exactly in a narrower mode, and the target can
2920 perform the extension directly from constant or memory, then emit the
2921 move as an extension. */
2924 compress_float_constant (rtx x, rtx y)
2926 enum machine_mode dstmode = GET_MODE (x);
2927 enum machine_mode orig_srcmode = GET_MODE (y);
2928 enum machine_mode srcmode;
2931 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2933 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2934 srcmode != orig_srcmode;
2935 srcmode = GET_MODE_WIDER_MODE (srcmode))
2938 rtx trunc_y, last_insn;
2940 /* Skip if the target can't extend this way. */
2941 ic = can_extend_p (dstmode, srcmode, 0);
2942 if (ic == CODE_FOR_nothing)
2945 /* Skip if the narrowed value isn't exact. */
2946 if (! exact_real_truncate (srcmode, &r))
2949 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
2951 if (LEGITIMATE_CONSTANT_P (trunc_y))
2953 /* Skip if the target needs extra instructions to perform
2954 the extension. */
2955 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
2958 else if (float_extend_from_mem[dstmode][srcmode])
2959 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
2963 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
2964 last_insn = get_last_insn ();
2966 if (REG_P (x))
2967 set_unique_reg_note (last_insn, REG_EQUAL, y);
2969 return last_insn;
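
/* Illustrative sketch, hypothetical: a DFmode move of 1.0 can become
   an SFmode constant-pool load plus an extension when 1.0 truncates
   to SFmode exactly, which is the case compress_float_constant
   detects.  */
static rtx
example_move_float_one (void)
{
  rtx x = gen_reg_rtx (DFmode);
  rtx y = CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode);
  return emit_move_insn (x, y);
}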
2975 /* Pushing data onto the stack. */
2977 /* Push a block of length SIZE (perhaps variable)
2978 and return an rtx to address the beginning of the block.
2979 The value may be virtual_outgoing_args_rtx.
2981 EXTRA is the number of bytes of padding to push in addition to SIZE.
2982 BELOW nonzero means this padding comes at low addresses;
2983 otherwise, the padding comes at high addresses. */
2986 push_block (rtx size, int extra, int below)
2990 size = convert_modes (Pmode, ptr_mode, size, 1);
2991 if (CONSTANT_P (size))
2992 anti_adjust_stack (plus_constant (size, extra));
2993 else if (REG_P (size) && extra == 0)
2994 anti_adjust_stack (size);
2997 temp = copy_to_mode_reg (Pmode, size);
2999 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3000 temp, 0, OPTAB_LIB_WIDEN);
3001 anti_adjust_stack (temp);
3004 #ifndef STACK_GROWS_DOWNWARD
3005 if (0)
3006 #else
3007 if (1)
3008 #endif
3010 temp = virtual_outgoing_args_rtx;
3011 if (extra != 0 && below)
3012 temp = plus_constant (temp, extra);
3016 if (GET_CODE (size) == CONST_INT)
3017 temp = plus_constant (virtual_outgoing_args_rtx,
3018 -INTVAL (size) - (below ? 0 : extra));
3019 else if (extra != 0 && !below)
3020 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3021 negate_rtx (Pmode, plus_constant (size, extra)));
3023 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3024 negate_rtx (Pmode, size));
3027 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
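
/* Illustrative sketch, hypothetical: reserving 32 bytes of stack
   with no extra padding and getting the address of the block.  */
static rtx
example_reserve_block (void)
{
  return push_block (GEN_INT (32), 0, 0);
}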
3030 #ifdef PUSH_ROUNDING
3032 /* Emit single push insn. */
3035 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3038 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3040 enum insn_code icode;
3041 insn_operand_predicate_fn pred;
3043 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3044 /* If there is a push pattern, use it. Otherwise try the old way of
3045 throwing a MEM representing the push operation to the move expander. */
3046 icode = push_optab->handlers[(int) mode].insn_code;
3047 if (icode != CODE_FOR_nothing)
3049 if (((pred = insn_data[(int) icode].operand[0].predicate)
3050 && !((*pred) (x, mode))))
3051 x = force_reg (mode, x);
3052 emit_insn (GEN_FCN (icode) (x));
3053 return;
3055 if (GET_MODE_SIZE (mode) == rounded_size)
3056 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3057 /* If we are to pad downward, adjust the stack pointer first and
3058 then store X into the stack location using an offset. This is
3059 because emit_move_insn does not know how to pad; it does not have
3060 access to type. */
3061 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3063 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3064 HOST_WIDE_INT offset;
3066 emit_move_insn (stack_pointer_rtx,
3067 expand_binop (Pmode,
3068 #ifdef STACK_GROWS_DOWNWARD
3069 sub_optab,
3070 #else
3071 add_optab,
3072 #endif
3073 stack_pointer_rtx,
3074 GEN_INT (rounded_size),
3075 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3077 offset = (HOST_WIDE_INT) padding_size;
3078 #ifdef STACK_GROWS_DOWNWARD
3079 if (STACK_PUSH_CODE == POST_DEC)
3080 /* We have already decremented the stack pointer, so get the
3081 previous value. */
3082 offset += (HOST_WIDE_INT) rounded_size;
3083 #else
3084 if (STACK_PUSH_CODE == POST_INC)
3085 /* We have already incremented the stack pointer, so get the
3086 previous value. */
3087 offset -= (HOST_WIDE_INT) rounded_size;
3088 #endif
3089 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3093 #ifdef STACK_GROWS_DOWNWARD
3094 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3095 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3096 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3097 #else
3098 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3099 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3100 GEN_INT (rounded_size));
3101 #endif
3102 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3105 dest = gen_rtx_MEM (mode, dest_addr);
3109 set_mem_attributes (dest, type, 1);
3111 if (flag_optimize_sibling_calls)
3112 /* Function incoming arguments may overlap with sibling call
3113 outgoing arguments and we cannot allow reordering of reads
3114 from function arguments with stores to outgoing arguments
3115 of sibling calls. */
3116 set_mem_alias_set (dest, 0);
3118 emit_move_insn (dest, x);
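
/* Illustrative sketch, hypothetical: pushing a single SImode value,
   as emit_push_insn does below for simple scalars when push insns
   are available.  */
#ifdef PUSH_ROUNDING
static void
example_push_si (rtx val)
{
  emit_single_push_insn (SImode, val, NULL_TREE);
}
#endif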
3122 /* Generate code to push X onto the stack, assuming it has mode MODE and
3124 MODE is redundant except when X is a CONST_INT (since they don't
3125 carry mode info).
3126 SIZE is an rtx for the size of data to be copied (in bytes),
3127 needed only if X is BLKmode.
3129 ALIGN (in bits) is maximum alignment we can assume.
3131 If PARTIAL and REG are both nonzero, then copy that many of the first
3132 words of X into registers starting with REG, and push the rest of X.
3133 The amount of space pushed is decreased by PARTIAL words,
3134 rounded *down* to a multiple of PARM_BOUNDARY.
3135 REG must be a hard register in this case.
3136 If REG is zero but PARTIAL is not, take all other actions for an
3137 argument partially in registers, but do not actually load any
3138 registers.
3140 EXTRA is the amount in bytes of extra space to leave next to this arg.
3141 This is ignored if an argument block has already been allocated.
3143 On a machine that lacks real push insns, ARGS_ADDR is the address of
3144 the bottom of the argument block for this call. We use indexing off there
3145 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3146 argument block has not been preallocated.
3148 ARGS_SO_FAR is the size of args previously pushed for this call.
3150 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3151 for arguments passed in registers. If nonzero, it will be the number
3152 of bytes required. */
3155 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3156 unsigned int align, int partial, rtx reg, int extra,
3157 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3161 enum direction stack_direction
3162 #ifdef STACK_GROWS_DOWNWARD
3163 = downward;
3164 #else
3165 = upward;
3166 #endif
3168 /* Decide where to pad the argument: `downward' for below,
3169 `upward' for above, or `none' for don't pad it.
3170 Default is below for small data on big-endian machines; else above. */
3171 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3173 /* Invert direction if stack is post-decrement.
3174 FIXME: why? */
3175 if (STACK_PUSH_CODE == POST_DEC)
3176 if (where_pad != none)
3177 where_pad = (where_pad == downward ? upward : downward);
3181 if (mode == BLKmode)
3183 /* Copy a block into the stack, entirely or partially. */
3186 int used = partial * UNITS_PER_WORD;
3190 if (reg && GET_CODE (reg) == PARALLEL)
3192 /* Use the size of the elt to compute offset. */
3193 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3194 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3195 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3198 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3204 /* USED is now the # of bytes we need not copy to the stack
3205 because registers will take care of them. */
3208 xinner = adjust_address (xinner, BLKmode, used);
3210 /* If the partial register-part of the arg counts in its stack size,
3211 skip the part of stack space corresponding to the registers.
3212 Otherwise, start copying to the beginning of the stack space,
3213 by setting SKIP to 0. */
3214 skip = (reg_parm_stack_space == 0) ? 0 : used;
3216 #ifdef PUSH_ROUNDING
3217 /* Do it with several push insns if that doesn't take lots of insns
3218 and if there is no difficulty with push insns that skip bytes
3219 on the stack for alignment purposes. */
3220 if (args_addr == 0
3221 && PUSH_ARGS
3222 && GET_CODE (size) == CONST_INT
3223 && skip == 0
3224 && MEM_ALIGN (xinner) >= align
3225 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3226 /* Here we avoid the case of a structure whose weak alignment
3227 forces many pushes of a small amount of data,
3228 and such small pushes do rounding that causes trouble. */
3229 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3230 || align >= BIGGEST_ALIGNMENT
3231 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3232 == (align / BITS_PER_UNIT)))
3233 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3235 /* Push padding now if padding above and stack grows down,
3236 or if padding below and stack grows up.
3237 But if space already allocated, this has already been done. */
3238 if (extra && args_addr == 0
3239 && where_pad != none && where_pad != stack_direction)
3240 anti_adjust_stack (GEN_INT (extra));
3242 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3245 #endif /* PUSH_ROUNDING */
3249 /* Otherwise make space on the stack and copy the data
3250 to the address of that space. */
3252 /* Deduct words put into registers from the size we must copy. */
3255 if (GET_CODE (size) == CONST_INT)
3256 size = GEN_INT (INTVAL (size) - used);
3257 else
3258 size = expand_binop (GET_MODE (size), sub_optab, size,
3259 GEN_INT (used), NULL_RTX, 0,
3263 /* Get the address of the stack space.
3264 In this case, we do not deal with EXTRA separately.
3265 A single stack adjust will do. */
3266 if (! args_addr)
3267 {
3268 temp = push_block (size, extra, where_pad == downward);
3269 extra = 0;
3270 }
3271 else if (GET_CODE (args_so_far) == CONST_INT)
3272 temp = memory_address (BLKmode,
3273 plus_constant (args_addr,
3274 skip + INTVAL (args_so_far)));
3276 temp = memory_address (BLKmode,
3277 plus_constant (gen_rtx_PLUS (Pmode,
3278 args_addr,
3279 args_so_far),
3280 skip));
3282 if (!ACCUMULATE_OUTGOING_ARGS)
3284 /* If the source is referenced relative to the stack pointer,
3285 copy it to another register to stabilize it. We do not need
3286 to do this if we know that we won't be changing sp. */
3288 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3289 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3290 temp = copy_to_reg (temp);
3293 target = gen_rtx_MEM (BLKmode, temp);
3295 /* We do *not* set_mem_attributes here, because incoming arguments
3296 may overlap with sibling call outgoing arguments and we cannot
3297 allow reordering of reads from function arguments with stores
3298 to outgoing arguments of sibling calls. We do, however, want
3299 to record the alignment of the stack slot. */
3300 /* ALIGN may well be better aligned than TYPE, e.g. due to
3301 PARM_BOUNDARY. Assume the caller isn't lying. */
3302 set_mem_align (target, align);
3304 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3307 else if (partial > 0)
3309 /* Scalar partly in registers. */
3311 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3314 /* # words of start of argument
3315 that we must make space for but need not store. */
3316 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3317 int args_offset = INTVAL (args_so_far);
3320 /* Push padding now if padding above and stack grows down,
3321 or if padding below and stack grows up.
3322 But if space already allocated, this has already been done. */
3323 if (extra && args_addr == 0
3324 && where_pad != none && where_pad != stack_direction)
3325 anti_adjust_stack (GEN_INT (extra));
3327 /* If we make space by pushing it, we might as well push
3328 the real data. Otherwise, we can leave OFFSET nonzero
3329 and leave the space uninitialized. */
3333 /* Now NOT_STACK gets the number of words that we don't need to
3334 allocate on the stack. */
3335 not_stack = partial - offset;
3337 /* If the partial register-part of the arg counts in its stack size,
3338 skip the part of stack space corresponding to the registers.
3339 Otherwise, start copying to the beginning of the stack space,
3340 by setting SKIP to 0. */
3341 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3343 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3344 x = validize_mem (force_const_mem (mode, x));
3346 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3347 SUBREGs of such registers are not allowed. */
3348 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3349 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3350 x = copy_to_reg (x);
3352 /* Loop over all the words allocated on the stack for this arg. */
3353 /* We can do it by words, because any scalar bigger than a word
3354 has a size that is a multiple of a word. */
3355 #ifndef PUSH_ARGS_REVERSED
3356 for (i = not_stack; i < size; i++)
3357 #else
3358 for (i = size - 1; i >= not_stack; i--)
3359 #endif
3360 if (i >= not_stack + offset)
3361 emit_push_insn (operand_subword_force (x, i, mode),
3362 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3364 GEN_INT (args_offset + ((i - not_stack + skip)
3366 reg_parm_stack_space, alignment_pad);
3373 /* Push padding now if padding above and stack grows down,
3374 or if padding below and stack grows up.
3375 But if space already allocated, this has already been done. */
3376 if (extra && args_addr == 0
3377 && where_pad != none && where_pad != stack_direction)
3378 anti_adjust_stack (GEN_INT (extra));
3380 #ifdef PUSH_ROUNDING
3381 if (args_addr == 0 && PUSH_ARGS)
3382 emit_single_push_insn (mode, x, type);
3383 else
3384 #endif
3386 if (GET_CODE (args_so_far) == CONST_INT)
3387 addr
3388 = memory_address (mode,
3389 plus_constant (args_addr,
3390 INTVAL (args_so_far)));
3391 else
3392 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3393 args_so_far));
3394 dest = gen_rtx_MEM (mode, addr);
3396 /* We do *not* set_mem_attributes here, because incoming arguments
3397 may overlap with sibling call outgoing arguments and we cannot
3398 allow reordering of reads from function arguments with stores
3399 to outgoing arguments of sibling calls. We do, however, want
3400 to record the alignment of the stack slot. */
3401 /* ALIGN may well be better aligned than TYPE, e.g. due to
3402 PARM_BOUNDARY. Assume the caller isn't lying. */
3403 set_mem_align (dest, align);
3405 emit_move_insn (dest, x);
3409 /* If part should go in registers, copy that part
3410 into the appropriate registers. Do this now, at the end,
3411 since mem-to-mem copies above may do function calls. */
3412 if (partial > 0 && reg != 0)
3414 /* Handle calls that pass values in multiple non-contiguous locations.
3415 The Irix 6 ABI has examples of this. */
3416 if (GET_CODE (reg) == PARALLEL)
3417 emit_group_load (reg, x, type, -1);
3418 else
3419 move_block_to_reg (REGNO (reg), x, partial, mode);
3422 if (extra && args_addr == 0 && where_pad == stack_direction)
3423 anti_adjust_stack (GEN_INT (extra));
3425 if (alignment_pad && args_addr == 0)
3426 anti_adjust_stack (alignment_pad);
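
/* Illustrative sketch, hypothetical argument values: pushing one
   word-sized scalar with no partial-register part, no preallocated
   argument block, and no extra padding or alignment pad.  */
static void
example_push_word (rtx val)
{
  emit_push_insn (val, word_mode, NULL_TREE, NULL_RTX,
                  PARM_BOUNDARY, 0, NULL_RTX, 0,
                  NULL_RTX, const0_rtx, 0, NULL_RTX);
}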
3429 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3430 operations. */
3433 get_subtarget (rtx x)
3437 /* Only registers can be subtargets. */
3438 || !REG_P (x)
3439 /* Don't use hard regs to avoid extending their life. */
3440 || REGNO (x) < FIRST_PSEUDO_REGISTER
3444 /* Expand an assignment that stores the value of FROM into TO. */
3447 expand_assignment (tree to, tree from)
3452 /* Don't crash if the lhs of the assignment was erroneous. */
3454 if (TREE_CODE (to) == ERROR_MARK)
3456 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3460 /* Assignment of a structure component needs special treatment
3461 if the structure component's rtx is not simply a MEM.
3462 Assignment of an array element at a constant index, and assignment of
3463 an array element in an unaligned packed structure field, has the same
3464 problem. */
3466 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3467 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3468 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3470 enum machine_mode mode1;
3471 HOST_WIDE_INT bitsize, bitpos;
3479 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3480 &unsignedp, &volatilep);
3482 /* If we are going to use store_bit_field and extract_bit_field,
3483 make sure to_rtx will be safe for multiple use. */
3485 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3489 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3491 gcc_assert (MEM_P (to_rtx));
3493 #ifdef POINTERS_EXTEND_UNSIGNED
3494 if (GET_MODE (offset_rtx) != Pmode)
3495 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3496 #else
3497 if (GET_MODE (offset_rtx) != ptr_mode)
3498 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3499 #endif
3501 /* A constant address in TO_RTX can have VOIDmode, we must not try
3502 to call force_reg for that case. Avoid that case. */
3504 && GET_MODE (to_rtx) == BLKmode
3505 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3507 && (bitpos % bitsize) == 0
3508 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3509 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3511 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3515 to_rtx = offset_address (to_rtx, offset_rtx,
3516 highest_pow2_factor_for_target (to,
3522 /* If the field is at offset zero, we could have been given the
3523 DECL_RTX of the parent struct. Don't munge it. */
3524 to_rtx = shallow_copy_rtx (to_rtx);
3526 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3529 /* Deal with volatile and readonly fields. The former is only done
3530 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3531 if (volatilep && MEM_P (to_rtx))
3533 if (to_rtx == orig_to_rtx)
3534 to_rtx = copy_rtx (to_rtx);
3535 MEM_VOLATILE_P (to_rtx) = 1;
3538 if (MEM_P (to_rtx) && ! can_address_p (to))
3540 if (to_rtx == orig_to_rtx)
3541 to_rtx = copy_rtx (to_rtx);
3542 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3545 /* Optimize bitfld op= val in certain cases. */
3546 while (mode1 == VOIDmode
3547 && bitsize > 0 && bitsize < BITS_PER_WORD
3548 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3549 && !TREE_SIDE_EFFECTS (to)
3550 && !TREE_THIS_VOLATILE (to))
3553 rtx value, str_rtx = to_rtx;
3554 HOST_WIDE_INT bitpos1 = bitpos;
3559 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3560 || !BINARY_CLASS_P (src))
3563 op0 = TREE_OPERAND (src, 0);
3564 op1 = TREE_OPERAND (src, 1);
3567 if (! operand_equal_p (to, op0, 0))
3570 if (MEM_P (str_rtx))
3572 enum machine_mode mode = GET_MODE (str_rtx);
3573 HOST_WIDE_INT offset1;
3575 if (GET_MODE_BITSIZE (mode) == 0
3576 || GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
3578 mode = get_best_mode (bitsize, bitpos1, MEM_ALIGN (str_rtx),
3580 if (mode == VOIDmode)
3581 break;
3583 offset1 = bitpos1;
3584 bitpos1 %= GET_MODE_BITSIZE (mode);
3585 offset1 = (offset1 - bitpos1) / BITS_PER_UNIT;
3586 str_rtx = adjust_address (str_rtx, mode, offset1);
3588 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3591 /* If the bit field covers the whole REG/MEM, store_field
3592 will likely generate better code. */
3593 if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3596 /* We can't handle fields split across multiple entities. */
3597 if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3600 if (BYTES_BIG_ENDIAN)
3601 bitpos1 = GET_MODE_BITSIZE (GET_MODE (str_rtx)) - bitpos1
3602 - bitsize;
3604 /* Special case some bitfield op= exp. */
3605 switch (TREE_CODE (src))
3609 /* For now, just optimize the case of the topmost bitfield
3610 where we don't need to do any masking and also
3611 1 bit bitfields where xor can be used.
3612 We might win by one instruction for the other bitfields
3613 too if insv/extv instructions aren't used, so that
3614 can be added later. */
3615 if (bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx))
3616 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3618 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), 0);
3619 value = convert_modes (GET_MODE (str_rtx),
3620 TYPE_MODE (TREE_TYPE (op1)), value,
3621 TYPE_UNSIGNED (TREE_TYPE (op1)));
3623 /* We may be accessing data outside the field, which means
3624 we can alias adjacent data. */
3625 if (MEM_P (str_rtx))
3627 str_rtx = shallow_copy_rtx (str_rtx);
3628 set_mem_alias_set (str_rtx, 0);
3629 set_mem_expr (str_rtx, 0);
3632 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3633 if (bitsize == 1
3634 && bitpos1 + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
3636 value = expand_and (GET_MODE (str_rtx), value, const1_rtx,
3640 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
3641 build_int_cst (NULL_TREE, bitpos1),
3642 NULL_RTX, 1);
3643 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
3644 value, str_rtx, 1, OPTAB_WIDEN);
3645 if (result != str_rtx)
3646 emit_move_insn (str_rtx, result);
3658 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3659 TREE_TYPE (tem), get_alias_set (to));
3661 preserve_temp_slots (result);
3665 /* If the value is meaningful, convert RESULT to the proper mode.
3666 Otherwise, return nothing. */
3670 /* If the rhs is a function call and its value is not an aggregate,
3671 call the function before we start to compute the lhs.
3672 This is needed for correct code for cases such as
3673 val = setjmp (buf) on machines where reference to val
3674 requires loading up part of an address in a separate insn.
3676 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3677 since it might be a promoted variable where the zero- or sign- extension
3678 needs to be done. Handling this in the normal way is safe because no
3679 computation is done before the call. */
3680 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3681 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3682 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3683 && REG_P (DECL_RTL (to))))
3688 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3690 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3692 /* Handle calls that return values in multiple non-contiguous locations.
3693 The Irix 6 ABI has examples of this. */
3694 if (GET_CODE (to_rtx) == PARALLEL)
3695 emit_group_load (to_rtx, value, TREE_TYPE (from),
3696 int_size_in_bytes (TREE_TYPE (from)));
3697 else if (GET_MODE (to_rtx) == BLKmode)
3698 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3701 if (POINTER_TYPE_P (TREE_TYPE (to)))
3702 value = convert_memory_address (GET_MODE (to_rtx), value);
3703 emit_move_insn (to_rtx, value);
3705 preserve_temp_slots (to_rtx);
3711 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3712 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3715 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3717 /* Don't move directly into a return register. */
3718 if (TREE_CODE (to) == RESULT_DECL
3719 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3724 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3726 if (GET_CODE (to_rtx) == PARALLEL)
3727 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3728 int_size_in_bytes (TREE_TYPE (from)));
3729 else
3730 emit_move_insn (to_rtx, temp);
3732 preserve_temp_slots (to_rtx);
3738 /* In case we are returning the contents of an object which overlaps
3739 the place the value is being stored, use a safe function when copying
3740 a value through a pointer into a structure value return block. */
3741 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3742 && current_function_returns_struct
3743 && !current_function_returns_pcc_struct)
3748 size = expr_size (from);
3749 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3751 emit_library_call (memmove_libfunc, LCT_NORMAL,
3752 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3753 XEXP (from_rtx, 0), Pmode,
3754 convert_to_mode (TYPE_MODE (sizetype),
3755 size, TYPE_UNSIGNED (sizetype)),
3756 TYPE_MODE (sizetype));
3758 preserve_temp_slots (to_rtx);
3764 /* Compute FROM and store the value in the rtx we got. */
3767 result = store_expr (from, to_rtx, 0);
3768 preserve_temp_slots (result);
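
/* Illustrative sketch, hypothetical: a front end expanding the
   source statement "x = y" for two decls reduces to a single call.  */
static void
example_expand_simple_assignment (tree x, tree y)
{
  expand_assignment (x, y);
}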
3774 /* Generate code for computing expression EXP,
3775 and storing the value into TARGET.
3777 If the mode is BLKmode then we may return TARGET itself.
3778 It turns out that in BLKmode it doesn't cause a problem,
3779 because C has no operators that could combine two different
3780 assignments into the same BLKmode object with different values
3781 with no sequence point. Will other languages need this to
3782 be more careful?
3784 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3785 stack, and block moves may need to be treated specially. */
3788 store_expr (tree exp, rtx target, int call_param_p)
3791 rtx alt_rtl = NULL_RTX;
3792 int dont_return_target = 0;
3794 if (VOID_TYPE_P (TREE_TYPE (exp)))
3796 /* C++ can generate ?: expressions with a throw expression in one
3797 branch and an rvalue in the other. Here, we resolve attempts to
3798 store the throw expression's nonexistent result. */
3799 gcc_assert (!call_param_p);
3800 expand_expr (exp, const0_rtx, VOIDmode, 0);
3803 if (TREE_CODE (exp) == COMPOUND_EXPR)
3805 /* Perform first part of compound expression, then assign from second
3807 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3808 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3809 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3811 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3813 /* For conditional expression, get safe form of the target. Then
3814 test the condition, doing the appropriate assignment on either
3815 side. This avoids the creation of unnecessary temporaries.
3816 For non-BLKmode, it is more efficient not to do this. */
3818 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3820 do_pending_stack_adjust ();
3822 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3823 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3824 emit_jump_insn (gen_jump (lab2));
3825 emit_barrier ();
3826 emit_label (lab1);
3827 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
3828 emit_label (lab2);
3833 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3834 /* If this is a scalar in a register that is stored in a wider mode
3835 than the declared mode, compute the result into its declared mode
3836 and then convert to the wider mode. Our value is the computed
3837 expression. */
3839 rtx inner_target = 0;
3841 /* We can do the conversion inside EXP, which will often result
3842 in some optimizations. Do the conversion in two steps: first
3843 change the signedness, if needed, then the extend. But don't
3844 do this if the type of EXP is a subtype of something else
3845 since then the conversion might involve more than just
3846 converting modes. */
3847 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
3848 && TREE_TYPE (TREE_TYPE (exp)) == 0
3849 && (!lang_hooks.reduce_bit_field_operations
3850 || (GET_MODE_PRECISION (GET_MODE (target))
3851 == TYPE_PRECISION (TREE_TYPE (exp)))))
3853 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3854 != SUBREG_PROMOTED_UNSIGNED_P (target))
3855 exp = convert
3856 (lang_hooks.types.signed_or_unsigned_type
3857 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3859 exp = convert (lang_hooks.types.type_for_mode
3860 (GET_MODE (SUBREG_REG (target)),
3861 SUBREG_PROMOTED_UNSIGNED_P (target)),
3864 inner_target = SUBREG_REG (target);
3867 temp = expand_expr (exp, inner_target, VOIDmode,
3868 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3870 /* If TEMP is a VOIDmode constant, use convert_modes to make
3871 sure that we properly convert it. */
3872 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3874 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3875 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3876 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3877 GET_MODE (target), temp,
3878 SUBREG_PROMOTED_UNSIGNED_P (target));
3881 convert_move (SUBREG_REG (target), temp,
3882 SUBREG_PROMOTED_UNSIGNED_P (target));
3888 temp = expand_expr_real (exp, target, GET_MODE (target),
3889 (call_param_p
3890 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3891 &alt_rtl);
3892 /* Return TARGET if it's a specified hardware register.
3893 If TARGET is a volatile mem ref, either return TARGET
3894 or return a reg copied *from* TARGET; ANSI requires this.
3896 Otherwise, if TEMP is not TARGET, return TEMP
3897 if it is constant (for efficiency),
3898 or if we really want the correct value. */
3899 if (!(target && REG_P (target)
3900 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3901 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3902 && ! rtx_equal_p (temp, target)
3903 && CONSTANT_P (temp))
3904 dont_return_target = 1;
3907 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3908 the same as that of TARGET, adjust the constant. This is needed, for
3909 example, in case it is a CONST_DOUBLE and we want only a word-sized
3911 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3912 && TREE_CODE (exp) != ERROR_MARK
3913 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3914 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3915 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3917 /* If value was not generated in the target, store it there.
3918 Convert the value to TARGET's type first if necessary and emit the
3919 pending incrementations that have been queued when expanding EXP.
3920 Note that we cannot emit the whole queue blindly because this will
3921 effectively disable the POST_INC optimization later.
3923 If TEMP and TARGET compare equal according to rtx_equal_p, but
3924 one or both of them are volatile memory refs, we have to distinguish
3926 - expand_expr has used TARGET. In this case, we must not generate
3927 another copy. This can be detected by TARGET being equal according
3928 to == .
3929 - expand_expr has not used TARGET - that means that the source just
3930 happens to have the same RTX form. Since temp will have been created
3931 by expand_expr, it will compare unequal according to == .
3932 We must generate a copy in this case, to reach the correct number
3933 of volatile memory references. */
3935 if ((! rtx_equal_p (temp, target)
3936 || (temp != target && (side_effects_p (temp)
3937 || side_effects_p (target))))
3938 && TREE_CODE (exp) != ERROR_MARK
3939 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
3940 but TARGET is not valid memory reference, TEMP will differ
3941 from TARGET although it is really the same location. */
3942 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
3943 /* If there's nothing to copy, don't bother. Don't call expr_size
3944 unless necessary, because some front-ends (C++) expr_size-hook
3945 aborts on objects that are not supposed to be bit-copied or
3946 bit-initialized. */
3947 && expr_size (exp) != const0_rtx)
3949 if (GET_MODE (temp) != GET_MODE (target)
3950 && GET_MODE (temp) != VOIDmode)
3952 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
3953 if (dont_return_target)
3955 /* In this case, we will return TEMP,
3956 so make sure it has the proper mode.
3957 But don't forget to store the value into TARGET. */
3958 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3959 emit_move_insn (target, temp);
3962 convert_move (target, temp, unsignedp);
3965 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3967 /* Handle copying a string constant into an array. The string
3968 constant may be shorter than the array. So copy just the string's
3969 actual length, and clear the rest. First get the size of the data
3970 type of the string, which is actually the size of the target. */
3971 rtx size = expr_size (exp);
3973 if (GET_CODE (size) == CONST_INT
3974 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3975 emit_block_move (target, temp, size,
3976 (call_param_p
3977 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3980 /* Compute the size of the data to copy from the string. */
3981 tree copy_size
3982 = size_binop (MIN_EXPR,
3983 make_tree (sizetype, size),
3984 size_int (TREE_STRING_LENGTH (exp)));
3986 = expand_expr (copy_size, NULL_RTX, VOIDmode,
3987 (call_param_p
3988 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
3991 /* Copy that much. */
3992 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
3993 TYPE_UNSIGNED (sizetype));
3994 emit_block_move (target, temp, copy_size_rtx,
3995 (call_param_p
3996 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3998 /* Figure out how much is left in TARGET that we have to clear.
3999 Do all calculations in ptr_mode. */
4000 if (GET_CODE (copy_size_rtx) == CONST_INT)
4002 size = plus_constant (size, -INTVAL (copy_size_rtx));
4003 target = adjust_address (target, BLKmode,
4004 INTVAL (copy_size_rtx));
4008 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4009 copy_size_rtx, NULL_RTX, 0,
4012 #ifdef POINTERS_EXTEND_UNSIGNED
4013 if (GET_MODE (copy_size_rtx) != Pmode)
4014 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4015 TYPE_UNSIGNED (sizetype));
4016 #endif
4018 target = offset_address (target, copy_size_rtx,
4019 highest_pow2_factor (copy_size));
4020 label = gen_label_rtx ();
4021 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4022 GET_MODE (size), 0, label);
4025 if (size != const0_rtx)
4026 clear_storage (target, size);
4028 emit_label (label);
4032 /* Handle calls that return values in multiple non-contiguous locations.
4033 The Irix 6 ABI has examples of this. */
4034 else if (GET_CODE (target) == PARALLEL)
4035 emit_group_load (target, temp, TREE_TYPE (exp),
4036 int_size_in_bytes (TREE_TYPE (exp)));
4037 else if (GET_MODE (temp) == BLKmode)
4038 emit_block_move (target, temp, expr_size (exp),
4039 (call_param_p
4040 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4043 temp = force_operand (temp, target);
4044 if (temp != target)
4045 emit_move_insn (target, temp);
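
/* Illustrative sketch, hypothetical: computing EXP directly into an
   existing target outside of any call-parameter context.  */
static rtx
example_store_into (tree exp, rtx target)
{
  return store_expr (exp, target, /*call_param_p=*/0);
}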
4052 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4053 values and place it in *P_NZ_ELTS. Discover how many scalar fields
4054 are set to non-constant values and place it in *P_NC_ELTS. */
4057 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4058 HOST_WIDE_INT *p_nc_elts)
4060 HOST_WIDE_INT nz_elts, nc_elts;
4066 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4068 tree value = TREE_VALUE (list);
4069 tree purpose = TREE_PURPOSE (list);
4073 if (TREE_CODE (purpose) == RANGE_EXPR)
4075 tree lo_index = TREE_OPERAND (purpose, 0);
4076 tree hi_index = TREE_OPERAND (purpose, 1);
4078 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4079 mult = (tree_low_cst (hi_index, 1)
4080 - tree_low_cst (lo_index, 1) + 1);
4083 switch (TREE_CODE (value))
4087 HOST_WIDE_INT nz = 0, nc = 0;
4088 categorize_ctor_elements_1 (value, &nz, &nc);
4089 nz_elts += mult * nz;
4090 nc_elts += mult * nc;
4096 if (!initializer_zerop (value))
4100 if (!initializer_zerop (TREE_REALPART (value)))
4102 if (!initializer_zerop (TREE_IMAGPART (value)))
4108 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4109 if (!initializer_zerop (TREE_VALUE (v)))
4116 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4122 *p_nz_elts += nz_elts;
4123 *p_nc_elts += nc_elts;
4127 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4128 HOST_WIDE_INT *p_nc_elts)
4132 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
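/* Worked example (illustrative only): given an automatic variable

	extern int x;
	struct { int a, b, c; } s = { 1, 0, x };

   categorize_ctor_elements reports *P_NZ_ELTS == 2 (the fields
   initialized to 1 and to x, neither of which is known to be zero)
   and *P_NC_ELTS == 1 (the field initialized to x, which is not a
   compile-time constant).  */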
4135 /* Count the number of scalars in TYPE. Return -1 on overflow or
4139 count_type_elements (tree type)
4141 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4142 switch (TREE_CODE (type))
4146 tree telts = array_type_nelts (type);
4147 if (telts && host_integerp (telts, 1))
4149 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4150 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4153 else if (max / n > m)
4161 HOST_WIDE_INT n = 0, t;
4164 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4165 if (TREE_CODE (f) == FIELD_DECL)
4167 t = count_type_elements (TREE_TYPE (f));
4177 case QUAL_UNION_TYPE:
4179 /* Ho hum. How in the world do we guess here? Clearly it isn't
4180 right to count the fields. Guess based on the number of words. */
4181 HOST_WIDE_INT n = int_size_in_bytes (type);
4184 return n / UNITS_PER_WORD;
4191 return TYPE_VECTOR_SUBPARTS (type);
4200 case REFERENCE_TYPE:
4214 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4217 mostly_zeros_p (tree exp)
4219 if (TREE_CODE (exp) == CONSTRUCTOR)
4222 HOST_WIDE_INT nz_elts, nc_elts, elts;
4224 /* If there are no ranges of true bits, it is all zero. */
4225 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4226 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4228 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4229 elts = count_type_elements (TREE_TYPE (exp));
4231 return nz_elts < elts / 4;
4234 return initializer_zerop (exp);
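/* For instance (illustrative), the initializer

	int a[8] = { 0, 0, 5 };

   has 8 scalar elements, only one of which is nonzero; since 1 is
   less than 8 / 4, mostly_zeros_p returns 1, and callers will
   typically clear the whole object first and then store just the
   nonzero element.  */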
4237 /* Helper function for store_constructor.
4238 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4239 TYPE is the type of the CONSTRUCTOR, not the element type.
4240 CLEARED is as for store_constructor.
4241 ALIAS_SET is the alias set to use for any stores.
4243 This provides a recursive shortcut back to store_constructor when it isn't
4244 necessary to go through store_field. This is so that we can pass through
4245 the cleared field to let store_constructor know that we may not have to
4246 clear a substructure if the outer structure has already been cleared. */
4249 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4250 HOST_WIDE_INT bitpos, enum machine_mode mode,
4251 tree exp, tree type, int cleared, int alias_set)
4253 if (TREE_CODE (exp) == CONSTRUCTOR
4254 /* We can only call store_constructor recursively if the size and
4255 bit position are on a byte boundary. */
4256 && bitpos % BITS_PER_UNIT == 0
4257 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4258 /* If we have a nonzero bitpos for a register target, then we just
4259 let store_field do the bitfield handling. This is unlikely to
4260 generate unnecessary clear instructions anyway. */
4261 && (bitpos == 0 || MEM_P (target)))
4265 = adjust_address (target,
4266 GET_MODE (target) == BLKmode
4268 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4269 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4272 /* Update the alias set, if required. */
4273 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4274 && MEM_ALIAS_SET (target) != 0)
4276 target = copy_rtx (target);
4277 set_mem_alias_set (target, alias_set);
4280 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4283 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4286 /* Store the value of constructor EXP into the rtx TARGET.
4287 TARGET is either a REG or a MEM; we know it cannot conflict, since
4288 safe_from_p has been called.
4289 CLEARED is true if TARGET is known to have been zeroed.
4290 SIZE is the number of bytes of TARGET we are allowed to modify: this
4291 may not be the same as the size of EXP if we are assigning to a field
4292 which has been packed to exclude padding bits. */
4295 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4297 tree type = TREE_TYPE (exp);
4298 #ifdef WORD_REGISTER_OPERATIONS
4299 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4302 switch (TREE_CODE (type))
4306 case QUAL_UNION_TYPE:
4310 /* If size is zero or the target is already cleared, do nothing. */
4311 if (size == 0 || cleared)
4313 /* We either clear the aggregate or indicate the value is dead. */
4314 else if ((TREE_CODE (type) == UNION_TYPE
4315 || TREE_CODE (type) == QUAL_UNION_TYPE)
4316 && ! CONSTRUCTOR_ELTS (exp))
4317 /* If the constructor is empty, clear the union. */
4319 clear_storage (target, expr_size (exp));
4323 /* If we are building a static constructor into a register,
4324 set the initial value to zero so we can fold the value into
4325 a constant. But if more than one register is involved,
4326 this probably loses. */
4327 else if (REG_P (target) && TREE_STATIC (exp)
4328 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4330 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4334 /* If the constructor has fewer fields than the structure or
4335 if we are initializing the structure to mostly zeros, clear
4336 the whole structure first. Don't do this if TARGET is a
4337 register whose mode size isn't equal to SIZE since
4338 clear_storage can't handle this case. */
4340 && ((list_length (CONSTRUCTOR_ELTS (exp))
4341 != fields_length (type))
4342 || mostly_zeros_p (exp))
4344 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4347 clear_storage (target, GEN_INT (size));
4352 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4354 /* Store each element of the constructor into the
4355 corresponding field of TARGET. */
4357 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4359 tree field = TREE_PURPOSE (elt);
4360 tree value = TREE_VALUE (elt);
4361 enum machine_mode mode;
4362 HOST_WIDE_INT bitsize;
4363 HOST_WIDE_INT bitpos = 0;
4365 rtx to_rtx = target;
4367 /* Just ignore missing fields. We cleared the whole
4368 structure, above, if any fields are missing. */
4372 if (cleared && initializer_zerop (value))
4375 if (host_integerp (DECL_SIZE (field), 1))
4376 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4380 mode = DECL_MODE (field);
4381 if (DECL_BIT_FIELD (field))
4384 offset = DECL_FIELD_OFFSET (field);
4385 if (host_integerp (offset, 0)
4386 && host_integerp (bit_position (field), 0))
4388 bitpos = int_bit_position (field);
4392 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4399 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4400 make_tree (TREE_TYPE (exp),
4403 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4404 gcc_assert (MEM_P (to_rtx));
4406 #ifdef POINTERS_EXTEND_UNSIGNED
4407 if (GET_MODE (offset_rtx) != Pmode)
4408 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4410 if (GET_MODE (offset_rtx) != ptr_mode)
4411 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4414 to_rtx = offset_address (to_rtx, offset_rtx,
4415 highest_pow2_factor (offset));
4418 #ifdef WORD_REGISTER_OPERATIONS
4419 /* If this initializes a field that is smaller than a
4420 word, at the start of a word, try to widen it to a full
4421 word. This special case allows us to output C++ member
4422 function initializations in a form that the optimizers
4425 && bitsize < BITS_PER_WORD
4426 && bitpos % BITS_PER_WORD == 0
4427 && GET_MODE_CLASS (mode) == MODE_INT
4428 && TREE_CODE (value) == INTEGER_CST
4430 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4432 tree type = TREE_TYPE (value);
4434 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4436 type = lang_hooks.types.type_for_size
4437 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4438 value = convert (type, value);
4441 if (BYTES_BIG_ENDIAN)
4443 = fold (build2 (LSHIFT_EXPR, type, value,
4444 build_int_cst (NULL_TREE,
4445 BITS_PER_WORD - bitsize)));
4446 bitsize = BITS_PER_WORD;
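/* For example, on a 32-bit big-endian word target: initializing an
   8-bit field at bit position 0 with the constant 0x12 is rewritten
   above into a full-word store of 0x12 << 24, so the optimizers see
   an ordinary word-sized assignment instead of a bit-field
   insertion.  */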
4451 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4452 && DECL_NONADDRESSABLE_P (field))
4454 to_rtx = copy_rtx (to_rtx);
4455 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4458 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4459 value, type, cleared,
4460 get_alias_set (TREE_TYPE (field)));
4470 tree elttype = TREE_TYPE (type);
4472 HOST_WIDE_INT minelt = 0;
4473 HOST_WIDE_INT maxelt = 0;
4475 domain = TYPE_DOMAIN (type);
4476 const_bounds_p = (TYPE_MIN_VALUE (domain)
4477 && TYPE_MAX_VALUE (domain)
4478 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4479 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4481 /* If we have constant bounds for the range of the type, get them. */
4484 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4485 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4488 /* If the constructor has fewer elements than the array, clear
4489 the whole array first.  Similarly if this is a static
4490 constructor of a non-BLKmode object. */
4493 else if (REG_P (target) && TREE_STATIC (exp))
4497 HOST_WIDE_INT count = 0, zero_count = 0;
4498 need_to_clear = ! const_bounds_p;
4500 /* This loop is a more accurate version of the loop in
4501 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4502 is also needed to check for missing elements. */
4503 for (elt = CONSTRUCTOR_ELTS (exp);
4504 elt != NULL_TREE && ! need_to_clear;
4505 elt = TREE_CHAIN (elt))
4507 tree index = TREE_PURPOSE (elt);
4508 HOST_WIDE_INT this_node_count;
4510 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4512 tree lo_index = TREE_OPERAND (index, 0);
4513 tree hi_index = TREE_OPERAND (index, 1);
4515 if (! host_integerp (lo_index, 1)
4516 || ! host_integerp (hi_index, 1))
4522 this_node_count = (tree_low_cst (hi_index, 1)
4523 - tree_low_cst (lo_index, 1) + 1);
4526 this_node_count = 1;
4528 count += this_node_count;
4529 if (mostly_zeros_p (TREE_VALUE (elt)))
4530 zero_count += this_node_count;
4533 /* Clear the entire array first if there are any missing
4534 elements, or if the incidence of zero elements is >= 75%. */
4537 && (count < maxelt - minelt + 1
4538 || 4 * zero_count >= 3 * count))
4542 if (need_to_clear && size > 0)
4545 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4547 clear_storage (target, GEN_INT (size));
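/* For instance (illustrative), with the GNU C designated initializer

	int a[100] = { [3] = 7 };

   99 of the 100 elements are zero and missing from the constructor,
   so the code above clears the whole array with a single
   clear_storage call and then stores only the one nonzero element,
   rather than emitting a hundred separate stores.  */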
4551 if (!cleared && REG_P (target))
4552 /* Inform later passes that the old value is dead. */
4553 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4555 /* Store each element of the constructor into the
4556 corresponding element of TARGET, determined by counting the elements. */
4558 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4560 elt = TREE_CHAIN (elt), i++)
4562 enum machine_mode mode;
4563 HOST_WIDE_INT bitsize;
4564 HOST_WIDE_INT bitpos;
4566 tree value = TREE_VALUE (elt);
4567 tree index = TREE_PURPOSE (elt);
4568 rtx xtarget = target;
4570 if (cleared && initializer_zerop (value))
4573 unsignedp = TYPE_UNSIGNED (elttype);
4574 mode = TYPE_MODE (elttype);
4575 if (mode == BLKmode)
4576 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4577 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4580 bitsize = GET_MODE_BITSIZE (mode);
4582 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4584 tree lo_index = TREE_OPERAND (index, 0);
4585 tree hi_index = TREE_OPERAND (index, 1);
4586 rtx index_r, pos_rtx;
4587 HOST_WIDE_INT lo, hi, count;
4590 /* If the range is constant and "small", unroll the loop. */
4592 && host_integerp (lo_index, 0)
4593 && host_integerp (hi_index, 0)
4594 && (lo = tree_low_cst (lo_index, 0),
4595 hi = tree_low_cst (hi_index, 0),
4596 count = hi - lo + 1,
4599 || (host_integerp (TYPE_SIZE (elttype), 1)
4600 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4603 lo -= minelt; hi -= minelt;
4604 for (; lo <= hi; lo++)
4606 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4609 && !MEM_KEEP_ALIAS_SET_P (target)
4610 && TREE_CODE (type) == ARRAY_TYPE
4611 && TYPE_NONALIASED_COMPONENT (type))
4613 target = copy_rtx (target);
4614 MEM_KEEP_ALIAS_SET_P (target) = 1;
4617 store_constructor_field
4618 (target, bitsize, bitpos, mode, value, type, cleared,
4619 get_alias_set (elttype));
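/* Illustration: a GNU C range initializer such as

	int a[8] = { [2 ... 5] = 7 };

   whose bounds are constant and "small" is unrolled above into four
   individual element stores; the branch below instead builds a real
   runtime loop (index register, label, compare-and-jump) for large
   or non-constant ranges.  */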
4624 rtx loop_start = gen_label_rtx ();
4625 rtx loop_end = gen_label_rtx ();
4628 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4629 unsignedp = TYPE_UNSIGNED (domain);
4631 index = build_decl (VAR_DECL, NULL_TREE, domain);
4634 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4636 SET_DECL_RTL (index, index_r);
4637 store_expr (lo_index, index_r, 0);
4639 /* Build the head of the loop. */
4640 do_pending_stack_adjust ();
4641 emit_label (loop_start);
4643 /* Assign value to element index. */
4645 = convert (ssizetype,
4646 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4647 index, TYPE_MIN_VALUE (domain))));
4648 position = size_binop (MULT_EXPR, position,
4650 TYPE_SIZE_UNIT (elttype)));
4652 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4653 xtarget = offset_address (target, pos_rtx,
4654 highest_pow2_factor (position));
4655 xtarget = adjust_address (xtarget, mode, 0);
4656 if (TREE_CODE (value) == CONSTRUCTOR)
4657 store_constructor (value, xtarget, cleared,
4658 bitsize / BITS_PER_UNIT);
4660 store_expr (value, xtarget, 0);
4662 /* Generate a conditional jump to exit the loop. */
4663 exit_cond = build2 (LT_EXPR, integer_type_node,
4665 jumpif (exit_cond, loop_end);
4667 /* Update the loop counter, and jump to the head of the loop. */
4669 expand_assignment (index,
4670 build2 (PLUS_EXPR, TREE_TYPE (index),
4671 index, integer_one_node));
4673 emit_jump (loop_start);
4675 /* Build the end of the loop. */
4676 emit_label (loop_end);
4679 else if ((index != 0 && ! host_integerp (index, 0))
4680 || ! host_integerp (TYPE_SIZE (elttype), 1))
4685 index = ssize_int (1);
4688 index = fold_convert (ssizetype,
4689 fold (build2 (MINUS_EXPR,
4692 TYPE_MIN_VALUE (domain))));
4694 position = size_binop (MULT_EXPR, index,
4696 TYPE_SIZE_UNIT (elttype)));
4697 xtarget = offset_address (target,
4698 expand_expr (position, 0, VOIDmode, 0),
4699 highest_pow2_factor (position));
4700 xtarget = adjust_address (xtarget, mode, 0);
4701 store_expr (value, xtarget, 0);
4706 bitpos = ((tree_low_cst (index, 0) - minelt)
4707 * tree_low_cst (TYPE_SIZE (elttype), 1));
4709 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4711 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4712 && TREE_CODE (type) == ARRAY_TYPE
4713 && TYPE_NONALIASED_COMPONENT (type))
4715 target = copy_rtx (target);
4716 MEM_KEEP_ALIAS_SET_P (target) = 1;
4718 store_constructor_field (target, bitsize, bitpos, mode, value,
4719 type, cleared, get_alias_set (elttype));
4731 tree elttype = TREE_TYPE (type);
4732 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4733 enum machine_mode eltmode = TYPE_MODE (elttype);
4734 HOST_WIDE_INT bitsize;
4735 HOST_WIDE_INT bitpos;
4739 gcc_assert (eltmode != BLKmode);
4741 n_elts = TYPE_VECTOR_SUBPARTS (type);
4742 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4744 enum machine_mode mode = GET_MODE (target);
4746 icode = (int) vec_init_optab->handlers[mode].insn_code;
4747 if (icode != CODE_FOR_nothing)
4751 vector = alloca (n_elts);
4752 for (i = 0; i < n_elts; i++)
4753 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4757 /* If the constructor has fewer elements than the vector,
4758 clear the whole vector first.  Similarly if this is a static
4759 constructor of a non-BLKmode object. */
4762 else if (REG_P (target) && TREE_STATIC (exp))
4766 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4768 for (elt = CONSTRUCTOR_ELTS (exp);
4770 elt = TREE_CHAIN (elt))
4772 int n_elts_here = tree_low_cst
4773 (int_const_binop (TRUNC_DIV_EXPR,
4774 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4775 TYPE_SIZE (elttype), 0), 1);
4777 count += n_elts_here;
4778 if (mostly_zeros_p (TREE_VALUE (elt)))
4779 zero_count += n_elts_here;
4782 /* Clear the entire vector first if there are any missing elements,
4783 or if the incidence of zero elements is >= 75%. */
4784 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4787 if (need_to_clear && size > 0 && !vector)
4790 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4792 clear_storage (target, GEN_INT (size));
4796 if (!cleared && REG_P (target))
4797 /* Inform later passes that the old value is dead. */
4798 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4800 /* Store each element of the constructor into the corresponding
4801 element of TARGET, determined by counting the elements. */
4802 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4804 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4806 tree value = TREE_VALUE (elt);
4807 tree index = TREE_PURPOSE (elt);
4808 HOST_WIDE_INT eltpos;
4810 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4811 if (cleared && initializer_zerop (value))
4815 eltpos = tree_low_cst (index, 1);
4821 /* Vector CONSTRUCTORs should only be built from smaller
4822 vectors in the case of BLKmode vectors. */
4823 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
4824 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4828 enum machine_mode value_mode =
4829 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4830 ? TYPE_MODE (TREE_TYPE (value))
4832 bitpos = eltpos * elt_size;
4833 store_constructor_field (target, bitsize, bitpos,
4834 value_mode, value, type,
4835 cleared, get_alias_set (elttype));
4840 emit_insn (GEN_FCN (icode)
4842 gen_rtx_PARALLEL (GET_MODE (target),
4843 gen_rtvec_v (n_elts, vector))));
4847 /* Set constructor assignments. */
4850 tree elt = CONSTRUCTOR_ELTS (exp);
4851 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4852 tree domain = TYPE_DOMAIN (type);
4853 tree domain_min, domain_max, bitlength;
4855 /* The default implementation strategy is to extract the
4856 constant parts of the constructor, use that to initialize
4857 the target, and then "or" in whatever non-constant ranges
4858 we need in addition.
4860 If a large set is all zero or all ones, it is probably
4861 better to set it using memset. Also, if a large set has
4862 just a single range, it may also be better to first clear
4863 the set (using memset), and then set the bits we want. */
4866 /* Check for all zeros. */
4867 if (elt == NULL_TREE && size > 0)
4870 clear_storage (target, GEN_INT (size));
4874 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4875 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4876 bitlength = size_binop (PLUS_EXPR,
4877 size_diffop (domain_max, domain_min),
4880 nbits = tree_low_cst (bitlength, 1);
4882 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
4883 that are "complicated" (more than one range), initialize
4884 (the constant parts) by copying from a constant. */
4885 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4886 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4888 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4889 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4890 char *bit_buffer = alloca (nbits);
4891 HOST_WIDE_INT word = 0;
4892 unsigned int bit_pos = 0;
4893 unsigned int ibit = 0;
4894 unsigned int offset = 0; /* In bytes from beginning of set. */
4896 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4899 if (bit_buffer[ibit])
4901 if (BYTES_BIG_ENDIAN)
4902 word |= (1 << (set_word_size - 1 - bit_pos));
4904 word |= 1 << bit_pos;
4908 if (bit_pos >= set_word_size || ibit == nbits)
4910 if (word != 0 || ! cleared)
4912 rtx datum = gen_int_mode (word, mode);
4915 /* The assumption here is that it is safe to
4916 use XEXP if the set is multi-word, but not
4917 if it's single-word. */
4919 to_rtx = adjust_address (target, mode, offset);
4922 gcc_assert (!offset);
4925 emit_move_insn (to_rtx, datum);
4932 offset += set_word_size / BITS_PER_UNIT;
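/* Sketch of the word assembly above, assuming an 8-bit set word: for
   a set containing members 0 and 3, get_set_constructor_bits fills
   bit_buffer with { 1, 0, 0, 1, ... }; when BYTES_BIG_ENDIAN is false
   these pack into word == 0x09 (1 << 0 | 1 << 3), and when it is true
   the bits fill from the top of the word down, giving 0x90.  */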
4937 /* Don't bother clearing storage if the set is all ones. */
4938 if (TREE_CHAIN (elt) != NULL_TREE
4939 || (TREE_PURPOSE (elt) == NULL_TREE
4941 : (! host_integerp (TREE_VALUE (elt), 0)
4942 || ! host_integerp (TREE_PURPOSE (elt), 0)
4943 || (tree_low_cst (TREE_VALUE (elt), 0)
4944 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4945 != (HOST_WIDE_INT) nbits))))
4946 clear_storage (target, expr_size (exp));
4948 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4950 /* Start of range of element or NULL. */
4951 tree startbit = TREE_PURPOSE (elt);
4952 /* End of range of element, or element value. */
4953 tree endbit = TREE_VALUE (elt);
4954 HOST_WIDE_INT startb, endb;
4955 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4957 bitlength_rtx = expand_expr (bitlength,
4958 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4960 /* Handle non-range tuple element like [ expr ]. */
4961 if (startbit == NULL_TREE)
4963 startbit = save_expr (endbit);
4967 startbit = convert (sizetype, startbit);
4968 endbit = convert (sizetype, endbit);
4969 if (! integer_zerop (domain_min))
4971 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4972 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4974 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4975 EXPAND_CONST_ADDRESS);
4976 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4977 EXPAND_CONST_ADDRESS);
4983 ((build_qualified_type (lang_hooks.types.type_for_mode
4984 (GET_MODE (target), 0),
4987 emit_move_insn (targetx, target);
4992 gcc_assert (MEM_P (target));
4996 /* Optimization: If startbit and endbit are constants divisible
4997 by BITS_PER_UNIT, call memset instead. */
4998 if (TREE_CODE (startbit) == INTEGER_CST
4999 && TREE_CODE (endbit) == INTEGER_CST
5000 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5001 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5003 emit_library_call (memset_libfunc, LCT_NORMAL,
5005 plus_constant (XEXP (targetx, 0),
5006 startb / BITS_PER_UNIT),
5008 constm1_rtx, TYPE_MODE (integer_type_node),
5009 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5010 TYPE_MODE (sizetype));
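/* E.g., with BITS_PER_UNIT == 8, a constant range covering bits
   8..23 starts and ends on byte boundaries, so the call above amounts
   to

	memset ((char *) target + 1, -1, 2);

   filling the two whole bytes at once instead of going through the
   generic setbits routine below.  */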
5013 emit_library_call (setbits_libfunc, LCT_NORMAL,
5014 VOIDmode, 4, XEXP (targetx, 0),
5015 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5016 startbit_rtx, TYPE_MODE (sizetype),
5017 endbit_rtx, TYPE_MODE (sizetype));
5020 emit_move_insn (target, targetx);
5029 /* Store the value of EXP (an expression tree)
5030 into a subfield of TARGET which has mode MODE and occupies
5031 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5032 If MODE is VOIDmode, it means that we are storing into a bit-field.
5034 Always return const0_rtx unless we have something particular to return.
5037 TYPE is the type of the underlying object,
5039 ALIAS_SET is the alias set for the destination. This value will
5040 (in general) be different from that for TARGET, since TARGET is a
5041 reference to the containing structure. */
5044 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5045 enum machine_mode mode, tree exp, tree type, int alias_set)
5047 HOST_WIDE_INT width_mask = 0;
5049 if (TREE_CODE (exp) == ERROR_MARK)
5052 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5055 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5056 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5057 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5059 /* If we are storing into an unaligned field of an aligned union that is
5060 in a register, we may have the mode of TARGET being an integer mode but
5061 MODE == BLKmode. In that case, get an aligned object whose size and
5062 alignment are the same as TARGET and store TARGET into it (we can avoid
5063 the store if the field being stored is the entire width of TARGET). Then
5064 call ourselves recursively to store the field into a BLKmode version of
5065 that object. Finally, load from the object into TARGET. This is not
5066 very efficient in general, but should only be slightly more expensive
5067 than the otherwise-required unaligned accesses. Perhaps this can be
5068 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5069 twice, once with emit_move_insn and once via store_field. */
5072 && (REG_P (target) || GET_CODE (target) == SUBREG))
5074 rtx object = assign_temp (type, 0, 1, 1);
5075 rtx blk_object = adjust_address (object, BLKmode, 0);
5077 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5078 emit_move_insn (object, target);
5080 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5082 emit_move_insn (target, object);
5084 /* We want to return the BLKmode version of the data. */
5088 if (GET_CODE (target) == CONCAT)
5090 /* We're storing into a struct containing a single __complex. */
5092 gcc_assert (!bitpos);
5093 return store_expr (exp, target, 0);
5096 /* If the structure is in a register or if the component
5097 is a bit field, we cannot use addressing to access it.
5098 Use bit-field techniques or SUBREG to store in it. */
5100 if (mode == VOIDmode
5101 || (mode != BLKmode && ! direct_store[(int) mode]
5102 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5103 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5105 || GET_CODE (target) == SUBREG
5106 /* If the field isn't aligned enough to store as an ordinary memref,
5107 store it as a bit field. */
5109 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5110 || bitpos % GET_MODE_ALIGNMENT (mode))
5111 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5112 || (bitpos % BITS_PER_UNIT != 0)))
5113 /* If the RHS and field are a constant size and the size of the
5114 RHS isn't the same size as the bitfield, we must use bitfield
5117 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5118 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5120 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5122 /* If BITSIZE is narrower than the size of the type of EXP
5123 we will be narrowing TEMP. Normally, what's wanted are the
5124 low-order bits.  However, if EXP's type is a record and this is a
5125 big-endian machine, we want the upper BITSIZE bits. */
5126 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5127 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5128 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5129 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5130 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5134 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5136 if (mode != VOIDmode && mode != BLKmode
5137 && mode != TYPE_MODE (TREE_TYPE (exp)))
5138 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5140 /* If the modes of TARGET and TEMP are both BLKmode, both
5141 must be in memory and BITPOS must be aligned on a byte
5142 boundary. If so, we simply do a block copy. */
5143 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5145 gcc_assert (MEM_P (target) && MEM_P (temp)
5146 && !(bitpos % BITS_PER_UNIT));
5148 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5149 emit_block_move (target, temp,
5150 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5157 /* Store the value in the bitfield. */
5158 store_bit_field (target, bitsize, bitpos, mode, temp);
5164 /* Now build a reference to just the desired component. */
5165 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5167 if (to_rtx == target)
5168 to_rtx = copy_rtx (to_rtx);
5170 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5171 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5172 set_mem_alias_set (to_rtx, alias_set);
5174 return store_expr (exp, to_rtx, 0);
5178 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5179 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5180 codes and find the ultimate containing object, which we return.
5182 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5183 bit position, and *PUNSIGNEDP to the signedness of the field.
5184 If the position of the field is variable, we store a tree
5185 giving the variable offset (in units) in *POFFSET.
5186 This offset is in addition to the bit position.
5187 If the position is not variable, we store 0 in *POFFSET.
5189 If any of the extraction expressions is volatile,
5190 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5192 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5193 is a mode that can be used to access the field. In that case, *PBITSIZE
5196 If the field describes a variable-sized object, *PMODE is set to
5197 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5198 this case, but the address of the object can be found. */
5201 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5202 HOST_WIDE_INT *pbitpos, tree *poffset,
5203 enum machine_mode *pmode, int *punsignedp,
5207 enum machine_mode mode = VOIDmode;
5208 tree offset = size_zero_node;
5209 tree bit_offset = bitsize_zero_node;
5212 /* First get the mode, signedness, and size. We do this from just the
5213 outermost expression. */
5214 if (TREE_CODE (exp) == COMPONENT_REF)
5216 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5217 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5218 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5220 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5222 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5224 size_tree = TREE_OPERAND (exp, 1);
5225 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5229 mode = TYPE_MODE (TREE_TYPE (exp));
5230 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5232 if (mode == BLKmode)
5233 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5235 *pbitsize = GET_MODE_BITSIZE (mode);
5240 if (! host_integerp (size_tree, 1))
5241 mode = BLKmode, *pbitsize = -1;
5243 *pbitsize = tree_low_cst (size_tree, 1);
5246 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5247 and find the ultimate containing object. */
5250 if (TREE_CODE (exp) == BIT_FIELD_REF)
5251 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5252 else if (TREE_CODE (exp) == COMPONENT_REF)
5254 tree field = TREE_OPERAND (exp, 1);
5255 tree this_offset = component_ref_field_offset (exp);
5257 /* If this field hasn't been filled in yet, don't go
5258 past it. This should only happen when folding expressions
5259 made during type construction. */
5260 if (this_offset == 0)
5263 offset = size_binop (PLUS_EXPR, offset, this_offset);
5264 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5265 DECL_FIELD_BIT_OFFSET (field));
5267 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5270 else if (TREE_CODE (exp) == ARRAY_REF
5271 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5273 tree index = TREE_OPERAND (exp, 1);
5274 tree low_bound = array_ref_low_bound (exp);
5275 tree unit_size = array_ref_element_size (exp);
5277 /* We assume all arrays have sizes that are a multiple of a byte.
5278 First subtract the lower bound, if any, in the type of the
5279 index, then convert to sizetype and multiply by the size of the element. */
5281 if (! integer_zerop (low_bound))
5282 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5285 offset = size_binop (PLUS_EXPR, offset,
5286 size_binop (MULT_EXPR,
5287 convert (sizetype, index),
5291 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5292 conversions that don't change the mode, and all view conversions
5293 except those that need to "step up" the alignment. */
5294 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5295 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5296 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5297 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5299 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5300 < BIGGEST_ALIGNMENT)
5301 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5302 || TYPE_ALIGN_OK (TREE_TYPE
5303 (TREE_OPERAND (exp, 0))))))
5304 && ! ((TREE_CODE (exp) == NOP_EXPR
5305 || TREE_CODE (exp) == CONVERT_EXPR)
5306 && (TYPE_MODE (TREE_TYPE (exp))
5307 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5310 /* If any reference in the chain is volatile, the effect is volatile. */
5311 if (TREE_THIS_VOLATILE (exp))
5314 exp = TREE_OPERAND (exp, 0);
5317 /* If OFFSET is constant, see if we can return the whole thing as a
5318 constant bit position. Otherwise, split it up. */
5319 if (host_integerp (offset, 0)
5320 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5322 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5323 && host_integerp (tem, 0))
5324 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5326 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
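/* Worked example (illustrative): for an access s.f, where field f
   sits 4 bytes plus 3 bits into s, get_inner_reference returns the
   base object s, sets *PBITPOS to 4 * BITS_PER_UNIT + 3 with a
   *POFFSET of 0 (the position is constant), and derives *PBITSIZE
   and *PMODE from the declaration of f.  For s.a[i].f the variable
   part of the position ends up in *POFFSET instead, leaving only the
   constant residue in *PBITPOS.  */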
5332 /* Return a tree of sizetype representing the size, in bytes, of the element
5333 of EXP, an ARRAY_REF. */
5336 array_ref_element_size (tree exp)
5338 tree aligned_size = TREE_OPERAND (exp, 3);
5339 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5341 /* If a size was specified in the ARRAY_REF, it's the size measured
5342 in alignment units of the element type. So multiply by that value. */
5345 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5346 sizetype from another type of the same width and signedness. */
5347 if (TREE_TYPE (aligned_size) != sizetype)
5348 aligned_size = fold_convert (sizetype, aligned_size);
5349 return size_binop (MULT_EXPR, aligned_size,
5350 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5353 /* Otherwise, take the size from that of the element type. Substitute
5354 any PLACEHOLDER_EXPR that we have. */
5356 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5359 /* Return a tree representing the lower bound of the array mentioned in
5360 EXP, an ARRAY_REF. */
5363 array_ref_low_bound (tree exp)
5365 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5367 /* If a lower bound is specified in EXP, use it. */
5368 if (TREE_OPERAND (exp, 2))
5369 return TREE_OPERAND (exp, 2);
5371 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5372 substituting for a PLACEHOLDER_EXPR as needed. */
5373 if (domain_type && TYPE_MIN_VALUE (domain_type))
5374 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5376 /* Otherwise, return a zero of the appropriate type. */
5377 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5380 /* Return a tree representing the upper bound of the array mentioned in
5381 EXP, an ARRAY_REF. */
5384 array_ref_up_bound (tree exp)
5386 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5388 /* If there is a domain type and it has an upper bound, use it, substituting
5389 for a PLACEHOLDER_EXPR as needed. */
5390 if (domain_type && TYPE_MAX_VALUE (domain_type))
5391 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5393 /* Otherwise fail. */
5397 /* Return a tree representing the offset, in bytes, of the field referenced
5398 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5401 component_ref_field_offset (tree exp)
5403 tree aligned_offset = TREE_OPERAND (exp, 2);
5404 tree field = TREE_OPERAND (exp, 1);
5406 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5407 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that value. */
5411 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5412 sizetype from another type of the same width and signedness. */
5413 if (TREE_TYPE (aligned_offset) != sizetype)
5414 aligned_offset = fold_convert (sizetype, aligned_offset);
5415 return size_binop (MULT_EXPR, aligned_offset,
5416 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5419 /* Otherwise, take the offset from that of the field. Substitute
5420 any PLACEHOLDER_EXPR that we have. */
5422 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5425 /* Return 1 if T is an expression that get_inner_reference handles. */
5428 handled_component_p (tree t)
5430 switch (TREE_CODE (t))
5435 case ARRAY_RANGE_REF:
5436 case NON_LVALUE_EXPR:
5437 case VIEW_CONVERT_EXPR:
5440 /* ??? Sure they are handled, but get_inner_reference may return
5441 a different PBITSIZE, depending upon whether the expression is
5442 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5445 return (TYPE_MODE (TREE_TYPE (t))
5446 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5453 /* Given an rtx VALUE that may contain additions and multiplications, return
5454 an equivalent value that just refers to a register, memory, or constant.
5455 This is done by generating instructions to perform the arithmetic and
5456 returning a pseudo-register containing the value.
5458 The returned value may be a REG, SUBREG, MEM or constant. */
5461 force_operand (rtx value, rtx target)
5464 /* Use subtarget as the target for operand 0 of a binary operation. */
5465 rtx subtarget = get_subtarget (target);
5466 enum rtx_code code = GET_CODE (value);
5468 /* Check for subreg applied to an expression produced by the loop optimizer. */
5470 && !REG_P (SUBREG_REG (value))
5471 && !MEM_P (SUBREG_REG (value)))
5473 value = simplify_gen_subreg (GET_MODE (value),
5474 force_reg (GET_MODE (SUBREG_REG (value)),
5475 force_operand (SUBREG_REG (value),
5477 GET_MODE (SUBREG_REG (value)),
5478 SUBREG_BYTE (value));
5479 code = GET_CODE (value);
5482 /* Check for a PIC address load. */
5483 if ((code == PLUS || code == MINUS)
5484 && XEXP (value, 0) == pic_offset_table_rtx
5485 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5486 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5487 || GET_CODE (XEXP (value, 1)) == CONST))
5490 subtarget = gen_reg_rtx (GET_MODE (value));
5491 emit_move_insn (subtarget, value);
5495 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5498 target = gen_reg_rtx (GET_MODE (value));
5499 convert_move (target, force_operand (XEXP (value, 0), NULL),
5500 code == ZERO_EXTEND);
5504 if (ARITHMETIC_P (value))
5506 op2 = XEXP (value, 1);
5507 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5509 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5512 op2 = negate_rtx (GET_MODE (value), op2);
5515 /* Check for an addition with OP2 a constant integer and our first
5516 operand a PLUS of a virtual register and something else. In that
5517 case, we want to emit the sum of the virtual register and the
5518 constant first and then add the other value. This allows virtual
5519 register instantiation to simply modify the constant rather than
5520 creating another one around this addition. */
5521 if (code == PLUS && GET_CODE (op2) == CONST_INT
5522 && GET_CODE (XEXP (value, 0)) == PLUS
5523 && REG_P (XEXP (XEXP (value, 0), 0))
5524 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5525 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5527 rtx temp = expand_simple_binop (GET_MODE (value), code,
5528 XEXP (XEXP (value, 0), 0), op2,
5529 subtarget, 0, OPTAB_LIB_WIDEN);
5530 return expand_simple_binop (GET_MODE (value), code, temp,
5531 force_operand (XEXP (XEXP (value,
5533 target, 0, OPTAB_LIB_WIDEN);
5536 op1 = force_operand (XEXP (value, 0), subtarget);
5537 op2 = force_operand (op2, NULL_RTX);
5541 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5543 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5544 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5545 target, 1, OPTAB_LIB_WIDEN);
5547 return expand_divmod (0,
5548 FLOAT_MODE_P (GET_MODE (value))
5549 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5550 GET_MODE (value), op1, op2, target, 0);
5553 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5557 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5561 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5565 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5566 target, 0, OPTAB_LIB_WIDEN);
5569 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5570 target, 1, OPTAB_LIB_WIDEN);
5573 if (UNARY_P (value))
5575 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5576 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5579 #ifdef INSN_SCHEDULING
5580 /* On machines that have insn scheduling, we want all memory references to be
5581 explicit, so we need to deal with such paradoxical SUBREGs. */
5582 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5583 && (GET_MODE_SIZE (GET_MODE (value))
5584 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5586 = simplify_gen_subreg (GET_MODE (value),
5587 force_reg (GET_MODE (SUBREG_REG (value)),
5588 force_operand (SUBREG_REG (value),
5590 GET_MODE (SUBREG_REG (value)),
5591 SUBREG_BYTE (value));
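/* For example, given VALUE == (plus (mult (reg R1) (const_int 4))
   (reg R2)), force_operand emits a multiply and an add and returns a
   pseudo register holding R1 * 4 + R2; a VALUE that is already a
   plain register, memory reference or constant can come back
   unchanged.  */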
5597 /* Subroutine of expand_expr: return nonzero iff there is no way that
5598 EXP can reference X, which is being modified. TOP_P is nonzero if this
5599 call is going to be used to determine whether we need a temporary
5600 for EXP, as opposed to a recursive call to this function.
5602 It is always safe for this routine to return zero since it merely
5603 searches for optimization opportunities. */
5606 safe_from_p (rtx x, tree exp, int top_p)
5612 /* If EXP has varying size, we MUST use a target since we currently
5613 have no way of allocating temporaries of variable size
5614 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5615 So we assume here that something at a higher level has prevented a
5616 clash. This is somewhat bogus, but the best we can do. Only
5617 do this when X is BLKmode and when we are at the top level. */
5618 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5619 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5620 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5621 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5622 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5624 && GET_MODE (x) == BLKmode)
5625 /* If X is in the outgoing argument area, it is always safe. */
5627 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5628 || (GET_CODE (XEXP (x, 0)) == PLUS
5629 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5632 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5633 find the underlying pseudo. */
5634 if (GET_CODE (x) == SUBREG)
5637 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5641 /* Now look at our tree code and possibly recurse. */
5642 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5644 case tcc_declaration:
5645 exp_rtl = DECL_RTL_IF_SET (exp);
5651 case tcc_exceptional:
5652 if (TREE_CODE (exp) == TREE_LIST)
5656 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5658 exp = TREE_CHAIN (exp);
5661 if (TREE_CODE (exp) != TREE_LIST)
5662 return safe_from_p (x, exp, 0);
5665 else if (TREE_CODE (exp) == ERROR_MARK)
5666 return 1; /* An already-visited SAVE_EXPR? */
5671 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
5673 return (TREE_CODE (exp) != DECL_EXPR
5674 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5675 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5676 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5679 case tcc_comparison:
5680 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5685 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5687 case tcc_expression:
5689 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5690 the expression. If it is set, we conflict iff we are that rtx or
5691 both are in memory. Otherwise, we check all operands of the
5692 expression recursively. */
5694 switch (TREE_CODE (exp))
5697 /* If the operand is static or we are static, we can't conflict.
5698 Likewise if we don't conflict with the operand at all. */
5699 if (staticp (TREE_OPERAND (exp, 0))
5700 || TREE_STATIC (exp)
5701 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5704 /* Otherwise, the only way this can conflict is if we are taking
5705 the address of a DECL whose address is part of X, which is very rare. */
5707 exp = TREE_OPERAND (exp, 0);
5710 if (!DECL_RTL_SET_P (exp)
5711 || !MEM_P (DECL_RTL (exp)))
5714 exp_rtl = XEXP (DECL_RTL (exp), 0);
5718 case MISALIGNED_INDIRECT_REF:
5719 case ALIGN_INDIRECT_REF:
5722 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5723 get_alias_set (exp)))
5728 /* Assume that the call will clobber all hard registers and all of memory. */
5730 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5735 case WITH_CLEANUP_EXPR:
5736 case CLEANUP_POINT_EXPR:
5737 /* Lowered by gimplify.c. */
5741 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5747 /* If we have an rtx, we do not need to scan our operands. */
5751 nops = first_rtl_op (TREE_CODE (exp));
5752 for (i = 0; i < nops; i++)
5753 if (TREE_OPERAND (exp, i) != 0
5754 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5757 /* If this is a language-specific tree code, it may require
5758 special handling. */
5759 if ((unsigned int) TREE_CODE (exp)
5760 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5761 && !lang_hooks.safe_from_p (x, exp))
5766 /* Should never get a type here. */
5770 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it. */
5774 if (GET_CODE (exp_rtl) == SUBREG)
5776 exp_rtl = SUBREG_REG (exp_rtl);
5778 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5782 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5783 are memory and they conflict. */
5784 return ! (rtx_equal_p (x, exp_rtl)
5785 || (MEM_P (x) && MEM_P (exp_rtl)
5786 && true_dependence (exp_rtl, VOIDmode, x,
5787 rtx_addr_varies_p)));
5790 /* If we reach here, it is safe. */
5795 /* Return the highest power of two that EXP is known to be a multiple of.
5796 This is used in updating alignment of MEMs in array references. */
5798 static unsigned HOST_WIDE_INT
5799 highest_pow2_factor (tree exp)
5801 unsigned HOST_WIDE_INT c0, c1;
5803 switch (TREE_CODE (exp))
5806 /* We can find the lowest bit that's a one. If the low
5807 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5808 We need to handle this case since we can find it in a COND_EXPR,
5809 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5810 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5812 if (TREE_CONSTANT_OVERFLOW (exp))
5813 return BIGGEST_ALIGNMENT;
5816 /* Note: tree_low_cst is intentionally not used here;
5817 we don't care about the upper bits. */
5818 c0 = TREE_INT_CST_LOW (exp);
5820 return c0 ? c0 : BIGGEST_ALIGNMENT;
5824 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5825 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5826 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5827 return MIN (c0, c1);
5830 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5831 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5834 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5836 if (integer_pow2p (TREE_OPERAND (exp, 1))
5837 && host_integerp (TREE_OPERAND (exp, 1), 1))
5839 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5840 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5841 return MAX (1, c0 / c1);
5845 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5847 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5850 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5853 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5854 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5855 return MIN (c0, c1);
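/* Examples of the recursion above: for i * 12 the known factor is 4
   (the constant contributes its lowest set bit, 4, and i contributes
   1); for a + b it is the smaller of the operands' factors, since
   only that much can be guaranteed for the sum; and for the constant
   48 by itself it is 16, the lowest set bit.  */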
5864 /* Similar, except that the alignment requirements of TARGET are
5865 taken into account. Assume it is at least as aligned as its
5866 type, unless it is a COMPONENT_REF in which case the layout of
5867 the structure gives the alignment. */
5869 static unsigned HOST_WIDE_INT
5870 highest_pow2_factor_for_target (tree target, tree exp)
5872 unsigned HOST_WIDE_INT target_align, factor;
5874 factor = highest_pow2_factor (exp);
5875 if (TREE_CODE (target) == COMPONENT_REF)
5876 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5878 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5879 return MAX (factor, target_align);
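/* E.g., if EXP itself is only known to be a multiple of 2 but TARGET
   is a COMPONENT_REF whose field is declared with 8-byte alignment,
   the result is 8: the caller may rely on whichever of the two
   guarantees is stronger.  */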
5882 /* Expands variable VAR. */
5885 expand_var (tree var)
5887 if (DECL_EXTERNAL (var))
5890 if (TREE_STATIC (var))
5891 /* If this is an inlined copy of a static local variable,
5892 look up the original decl. */
5893 var = DECL_ORIGIN (var);
5895 if (TREE_STATIC (var)
5896 ? !TREE_ASM_WRITTEN (var)
5897 : !DECL_RTL_SET_P (var))
5899 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5900 /* Should be ignored. */;
5901 else if (lang_hooks.expand_decl (var))
5903 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
5905 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
5906 rest_of_decl_compilation (var, 0, 0);
5908 /* No expansion needed. */
5909 gcc_assert (TREE_CODE (var) == TYPE_DECL
5910 || TREE_CODE (var) == CONST_DECL
5911 || TREE_CODE (var) == FUNCTION_DECL
5912 || TREE_CODE (var) == LABEL_DECL);
5916 /* Subroutine of expand_expr. Expand the two operands of a binary
5917 expression EXP0 and EXP1 placing the results in OP0 and OP1.
5918 The value may be stored in TARGET if TARGET is nonzero. The
5919 MODIFIER argument is as documented by expand_expr. */
5922 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
5923 enum expand_modifier modifier)
5925 if (! safe_from_p (target, exp1, 1))
5927 if (operand_equal_p (exp0, exp1, 0))
5929 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5930 *op1 = copy_rtx (*op0);
5934 /* If we need to preserve evaluation order, copy exp0 into its own
5935 temporary variable so that it can't be clobbered by exp1. */
5936 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
5937 exp0 = save_expr (exp0);
5938 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
5939 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
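/* Illustration of the ordering concern handled above: when expanding
   a + f (), where the call may clobber a, in a language that mandates
   left-to-right evaluation (flag_evaluation_order), the save_expr
   above forces the first operand's value into a temporary before the
   second operand is expanded, so *OP0 cannot be clobbered by the
   call.  */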
5944 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
5945 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
5948 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
5949 enum expand_modifier modifier)
5951 rtx result, subtarget;
5953 HOST_WIDE_INT bitsize, bitpos;
5954 int volatilep, unsignedp;
5955 enum machine_mode mode1;
5957 /* If we are taking the address of a constant and are at the top level,
5958 we have to use output_constant_def since we can't call force_const_mem
5960 /* ??? This should be considered a front-end bug. We should not be
5961 generating ADDR_EXPR of something that isn't an LVALUE. The only
5962 exception here is STRING_CST. */
5963 if (TREE_CODE (exp) == CONSTRUCTOR
5964 || CONSTANT_CLASS_P (exp))
5965 return XEXP (output_constant_def (exp, 0), 0);
5967 /* Everything must be something allowed by is_gimple_addressable. */
5968 switch (TREE_CODE (exp))
5971 /* This case will happen via recursion for &a->b. */
5972 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
5975 /* Recurse and make the output_constant_def clause above handle this. */
5976 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
5980 /* The real part of the complex number is always first, therefore
5981 the address is the same as the address of the parent object. */
5984 inner = TREE_OPERAND (exp, 0);
5988 /* The imaginary part of the complex number is always second.
5989 The expression is therefore always offset by the size of the
5992 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
5993 inner = TREE_OPERAND (exp, 0);
5997 /* If the object is a DECL, then expand it for its rtl. Don't bypass
5998 expand_expr, as that can have various side effects; LABEL_DECLs, for
5999 example, may not have their DECL_RTL set yet. Assume language
6000 specific tree nodes can be expanded in some interesting way. */
6002 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6004 result = expand_expr (exp, target, tmode,
6005 modifier == EXPAND_INITIALIZER
6006 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6008 /* If the DECL isn't in memory, then the DECL wasn't properly
6009 marked TREE_ADDRESSABLE, which will be either a front-end
6010 or a tree optimizer bug. */
6011 gcc_assert (GET_CODE (result) == MEM);
6012 result = XEXP (result, 0);
6014 /* ??? Is this needed anymore? */
6015 if (DECL_P (exp) && ! TREE_USED (exp))
6017 assemble_external (exp);
6018 TREE_USED (exp) = 1;
6021 if (modifier != EXPAND_INITIALIZER
6022 && modifier != EXPAND_CONST_ADDRESS)
6023 result = force_operand (result, target);
6027 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6028 &mode1, &unsignedp, &volatilep);
6032 /* We must have made progress. */
6033 gcc_assert (inner != exp);
6035 subtarget = offset || bitpos ? NULL_RTX : target;
6036 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6042 if (modifier != EXPAND_NORMAL)
6043 result = force_operand (result, NULL);
6044 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6046 result = convert_memory_address (tmode, result);
6047 tmp = convert_memory_address (tmode, tmp);
6049 if (modifier == EXPAND_SUM)
6050 result = gen_rtx_PLUS (tmode, result, tmp);
6053 subtarget = bitpos ? NULL_RTX : target;
6054 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6055 1, OPTAB_LIB_WIDEN);
6061 /* Someone beforehand should have rejected taking the address
6062 of such an object. */
6063 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6065 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6066 if (modifier < EXPAND_SUM)
6067 result = force_operand (result, target);
6073 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6074 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6077 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6078 enum expand_modifier modifier)
6080 enum machine_mode rmode;
6083 /* Target mode of VOIDmode says "whatever's natural". */
6084 if (tmode == VOIDmode)
6085 tmode = TYPE_MODE (TREE_TYPE (exp));
6087 /* We can get called with some Weird Things if the user does silliness
6088 like "(short) &a". In that case, convert_memory_address won't do
6089 the right thing, so ignore the given target mode. */
6090 if (tmode != Pmode && tmode != ptr_mode)
6093 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6096 /* Despite expand_expr's claims concerning ignoring TMODE when not
6097 strictly convenient, stuff breaks if we don't honor it. Note
6098 that combined with the above, we only do this for pointer modes. */
6099 rmode = GET_MODE (result);
6100 if (rmode == VOIDmode)
6103 result = convert_memory_address (tmode, result);
6109 /* expand_expr: generate code for computing expression EXP.
6110 An rtx for the computed value is returned. The value is never null.
6111 In the case of a void EXP, const0_rtx is returned.
6113 The value may be stored in TARGET if TARGET is nonzero.
6114 TARGET is just a suggestion; callers must assume that
6115 the rtx returned may not be the same as TARGET.
6117 If TARGET is CONST0_RTX, it means that the value will be ignored.
6119 If TMODE is not VOIDmode, it suggests generating the
6120 result in mode TMODE. But this is done only when convenient.
6121 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6122 TMODE is just a suggestion; callers must assume that
6123 the rtx returned may not have mode TMODE.
6125 Note that TARGET may have neither TMODE nor MODE. In that case, it
6126 probably will not be used.
6128 If MODIFIER is EXPAND_SUM then when EXP is an addition
6129 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6130 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6131 products as above, or REG or MEM, or constant.
6132 Ordinarily in such cases we would output mul or add instructions
6133 and then return a pseudo reg containing the sum.
6135 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6136 it also marks a label as absolutely required (it can't be dead).
6137 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6138 This is used for outputting expressions used in initializers.
6140 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6141 with a constant address even if that address is not normally legitimate.
6142 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6144 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6145 a call parameter. Such targets require special care as we haven't yet
6146 marked TARGET so that it's safe from being trashed by libcalls. We
6147 don't want to use TARGET for anything but the final result;
6148 intermediate values must go elsewhere.  Additionally, calls to
6149 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6151 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6152 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6153 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6154 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6157 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6158 enum expand_modifier, rtx *);
6161 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6162 enum expand_modifier modifier, rtx *alt_rtl)
6165 rtx ret, last = NULL;
6167 /* Handle ERROR_MARK before anybody tries to access its type. */
6168 if (TREE_CODE (exp) == ERROR_MARK
6169 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6171 ret = CONST0_RTX (tmode);
6172 return ret ? ret : const0_rtx;
6175 if (flag_non_call_exceptions)
6177 rn = lookup_stmt_eh_region (exp);
6178 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6179 if (rn >= 0)
6180 last = get_last_insn ();
6183 /* If this is an expression of some kind and it has an associated line
6184 number, then emit the line number before expanding the expression.
6186 We need to save and restore the file and line information so that
6187 errors discovered during expansion are emitted with the right
6188 information. It would be better if the diagnostic routines
6189 used the file/line information embedded in the tree nodes rather
6190 than globals. */
6191 if (cfun && EXPR_HAS_LOCATION (exp))
6193 location_t saved_location = input_location;
6194 input_location = EXPR_LOCATION (exp);
6195 emit_line_note (input_location);
6197 /* Record where the insns produced belong. */
6198 record_block_change (TREE_BLOCK (exp));
6200 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6202 input_location = saved_location;
6206 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6209 /* If using non-call exceptions, mark all insns that may trap.
6210 expand_call() will mark CALL_INSNs before we get to this code,
6211 but it doesn't handle libcalls, and these may trap. */
6215 for (insn = next_real_insn (last); insn;
6216 insn = next_real_insn (insn))
6218 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6219 /* If we want exceptions for non-call insns, any
6220 may_trap_p instruction may throw. */
6221 && GET_CODE (PATTERN (insn)) != CLOBBER
6222 && GET_CODE (PATTERN (insn)) != USE
6223 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6225 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6235 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6236 enum expand_modifier modifier, rtx *alt_rtl)
6239 tree type = TREE_TYPE (exp);
6241 enum machine_mode mode;
6242 enum tree_code code = TREE_CODE (exp);
6244 rtx subtarget, original_target;
6247 bool reduce_bit_field = false;
6248 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6249 ? reduce_to_bit_field_precision ((expr), \
6250 target, \
6251 type) \
6252 : (expr))
6254 mode = TYPE_MODE (type);
6255 unsignedp = TYPE_UNSIGNED (type);
6256 if (lang_hooks.reduce_bit_field_operations
6257 && TREE_CODE (type) == INTEGER_TYPE
6258 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6260 /* An operation in what may be a bit-field type needs the
6261 result to be reduced to the precision of the bit-field type,
6262 which is narrower than that of the type's mode. */
6263 reduce_bit_field = true;
6264 if (modifier == EXPAND_STACK_PARM)
6268 /* Use subtarget as the target for operand 0 of a binary operation. */
6269 subtarget = get_subtarget (target);
6270 original_target = target;
6271 ignore = (target == const0_rtx
6272 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6273 || code == CONVERT_EXPR || code == COND_EXPR
6274 || code == VIEW_CONVERT_EXPR)
6275 && TREE_CODE (type) == VOID_TYPE));
6277 /* If we are going to ignore this result, we need only do something
6278 if there is a side-effect somewhere in the expression. If there
6279 is, short-circuit the most common cases here. Note that we must
6280 not call expand_expr with anything but const0_rtx in case this
6281 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
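/* For example (illustrative, not from the original source): expanding
   "(void) (x + f ());" takes the tcc_binary arm below; each operand is
   expanded with const0_rtx as its target, so the call's side effects are
   preserved while the addition itself is never emitted.  */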
6285 if (! TREE_SIDE_EFFECTS (exp))
6288 /* Ensure we reference a volatile object even if value is ignored, but
6289 don't do this if all we are doing is taking its address. */
6290 if (TREE_THIS_VOLATILE (exp)
6291 && TREE_CODE (exp) != FUNCTION_DECL
6292 && mode != VOIDmode && mode != BLKmode
6293 && modifier != EXPAND_CONST_ADDRESS)
6295 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6296 if (MEM_P (temp))
6297 temp = copy_to_reg (temp);
6301 if (TREE_CODE_CLASS (code) == tcc_unary
6302 || code == COMPONENT_REF || code == INDIRECT_REF)
6303 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6306 else if (TREE_CODE_CLASS (code) == tcc_binary
6307 || TREE_CODE_CLASS (code) == tcc_comparison
6308 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6310 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6311 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6314 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6315 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6316 /* If the second operand has no side effects, just evaluate
6317 the first. */
6318 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6320 else if (code == BIT_FIELD_REF)
6322 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6323 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6324 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6331 /* If we will do cse, generate all results into pseudo registers
6332 since 1) that allows cse to find more things
6333 and 2) otherwise cse could produce an insn the machine
6334 cannot support. An exception is a CONSTRUCTOR into a multi-word
6335 MEM: that's much more likely to be most efficient into the MEM.
6336 Another is a CALL_EXPR which must return in memory. */
6338 if (! cse_not_expected && mode != BLKmode && target
6339 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6340 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6341 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6348 tree function = decl_function_context (exp);
6350 temp = label_rtx (exp);
6351 temp = gen_rtx_LABEL_REF (Pmode, temp);
6353 if (function != current_function_decl
6355 LABEL_REF_NONLOCAL_P (temp) = 1;
6357 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6362 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6367 /* If a static var's type was incomplete when the decl was written,
6368 but the type is complete now, lay out the decl now. */
6369 if (DECL_SIZE (exp) == 0
6370 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6371 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6372 layout_decl (exp, 0);
6374 /* ... fall through ... */
6378 gcc_assert (DECL_RTL (exp));
6380 /* Ensure the variable is marked as used even if it doesn't go through
6381 a parser. If it hasn't been used yet, write out an external
6382 definition. */
6383 if (! TREE_USED (exp))
6385 assemble_external (exp);
6386 TREE_USED (exp) = 1;
6389 /* Show we haven't gotten RTL for this yet. */
6392 /* Variables inherited from containing functions should have
6393 been lowered by this point. */
6394 context = decl_function_context (exp);
6395 gcc_assert (!context
6396 || context == current_function_decl
6397 || TREE_STATIC (exp)
6398 /* ??? C++ creates functions that are not TREE_STATIC. */
6399 || TREE_CODE (exp) == FUNCTION_DECL);
6401 /* This is the case of an array whose size is to be determined
6402 from its initializer, while the initializer is still being parsed.
6403 See expand_decl. */
6405 if (MEM_P (DECL_RTL (exp))
6406 && REG_P (XEXP (DECL_RTL (exp), 0)))
6407 temp = validize_mem (DECL_RTL (exp));
6409 /* If DECL_RTL is memory, we are in the normal case and either
6410 the address is not valid or it is not a register and -fforce-addr
6411 is specified, get the address into a register. */
6413 else if (MEM_P (DECL_RTL (exp))
6414 && modifier != EXPAND_CONST_ADDRESS
6415 && modifier != EXPAND_SUM
6416 && modifier != EXPAND_INITIALIZER
6417 && (! memory_address_p (DECL_MODE (exp),
6418 XEXP (DECL_RTL (exp), 0))
6420 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6423 *alt_rtl = DECL_RTL (exp);
6424 temp = replace_equiv_address (DECL_RTL (exp),
6425 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6428 /* If we got something, return it. But first, set the alignment
6429 if the address is a register. */
6432 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6433 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6438 /* If the mode of DECL_RTL does not match that of the decl, it
6439 must be a promoted value. We return a SUBREG of the wanted mode,
6440 but mark it so that we know that it was already extended. */
6442 if (REG_P (DECL_RTL (exp))
6443 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6445 enum machine_mode pmode;
6447 /* Get the signedness used for this variable. Ensure we get the
6448 same mode we got when the variable was declared. */
6449 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6450 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6451 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6453 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6454 SUBREG_PROMOTED_VAR_P (temp) = 1;
6455 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6459 return DECL_RTL (exp);
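/* An illustrative example (not from the original source): on a target that
   promotes subword values to full registers, a "char c" local can have
   DECL_MODE QImode but DECL_RTL in SImode; the code above then returns
   (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set, telling later
   consumers that the value is already sign- or zero-extended.  */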
6462 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6463 TREE_INT_CST_HIGH (exp), mode);
6465 /* ??? If overflow is set, fold will have done an incomplete job,
6466 which can result in (plus xx (const_int 0)), which can get
6467 simplified by validate_replace_rtx during virtual register
6468 instantiation, which can result in unrecognizable insns.
6469 Avoid this by forcing all overflows into registers. */
6470 if (TREE_CONSTANT_OVERFLOW (exp)
6471 && modifier != EXPAND_INITIALIZER)
6472 temp = force_reg (mode, temp);
6477 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6478 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6479 return const_vector_from_tree (exp);
6481 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6482 TREE_VECTOR_CST_ELTS (exp)),
6483 ignore ? const0_rtx : target, tmode, modifier);
6486 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6489 /* If optimized, generate immediate CONST_DOUBLE
6490 which will be turned into memory by reload if necessary.
6492 We used to force a register so that loop.c could see it. But
6493 this does not allow gen_* patterns to perform optimizations with
6494 the constants. It also produces two insns in cases like "x = 1.0;".
6495 On most machines, floating-point constants are not permitted in
6496 many insns, so we'd end up copying it to a register in any case.
6498 Now, we do the copying in expand_binop, if appropriate. */
6499 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6500 TYPE_MODE (TREE_TYPE (exp)));
6503 /* Handle evaluating a complex constant in a CONCAT target. */
6504 if (original_target && GET_CODE (original_target) == CONCAT)
6506 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6509 rtarg = XEXP (original_target, 0);
6510 itarg = XEXP (original_target, 1);
6512 /* Move the real and imaginary parts separately. */
6513 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6514 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6517 emit_move_insn (rtarg, op0);
6519 emit_move_insn (itarg, op1);
6521 return original_target;
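/* A sketch of the effect (illustrative): when the target is
   (concat:SC (reg:SF R) (reg:SF I)), the code above moves the constant's
   real part straight into R and its imaginary part into I rather than
   materializing the whole complex constant first.  */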
6524 /* ... fall through ... */
6527 temp = output_constant_def (exp, 1);
6529 /* temp contains a constant address.
6530 On RISC machines where a constant address isn't valid,
6531 make some insns to get that address into a register. */
6532 if (modifier != EXPAND_CONST_ADDRESS
6533 && modifier != EXPAND_INITIALIZER
6534 && modifier != EXPAND_SUM
6535 && (! memory_address_p (mode, XEXP (temp, 0))
6536 || flag_force_addr))
6537 return replace_equiv_address (temp,
6538 copy_rtx (XEXP (temp, 0)));
6543 tree val = TREE_OPERAND (exp, 0);
6544 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6546 if (!SAVE_EXPR_RESOLVED_P (exp))
6548 /* We can indeed still hit this case, typically via builtin
6549 expanders calling save_expr immediately before expanding
6550 something. Assume this means that we only have to deal
6551 with non-BLKmode values. */
6552 gcc_assert (GET_MODE (ret) != BLKmode);
6554 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6555 DECL_ARTIFICIAL (val) = 1;
6556 DECL_IGNORED_P (val) = 1;
6557 TREE_OPERAND (exp, 0) = val;
6558 SAVE_EXPR_RESOLVED_P (exp) = 1;
6560 if (!CONSTANT_P (ret))
6561 ret = copy_to_reg (ret);
6562 SET_DECL_RTL (val, ret);
6569 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6570 expand_goto (TREE_OPERAND (exp, 0));
6572 expand_computed_goto (TREE_OPERAND (exp, 0));
6576 /* If we don't need the result, just ensure we evaluate any
6577 subexpressions. */
6578 if (ignore)
6580 tree elt;
6582 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6583 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6588 /* All elts simple constants => refer to a constant in memory. But
6589 if this is a non-BLKmode mode, let it store a field at a time
6590 since that should make a CONST_INT or CONST_DOUBLE when we
6591 fold. Likewise, if we have a target we can use, it is best to
6592 store directly into the target unless the type is large enough
6593 that memcpy will be used. If we are making an initializer and
6594 all operands are constant, put it in memory as well.
6596 FIXME: Avoid trying to fill vector constructors piece-meal.
6597 Output them with output_constant_def below unless we're sure
6598 they're zeros. This should go away when vector initializers
6599 are treated like VECTOR_CST instead of arrays.
6601 else if ((TREE_STATIC (exp)
6602 && ((mode == BLKmode
6603 && ! (target != 0 && safe_from_p (target, exp, 1)))
6604 || TREE_ADDRESSABLE (exp)
6605 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6606 && (! MOVE_BY_PIECES_P
6607 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6609 && ! mostly_zeros_p (exp))))
6610 || ((modifier == EXPAND_INITIALIZER
6611 || modifier == EXPAND_CONST_ADDRESS)
6612 && TREE_CONSTANT (exp)))
6614 rtx constructor = output_constant_def (exp, 1);
6616 if (modifier != EXPAND_CONST_ADDRESS
6617 && modifier != EXPAND_INITIALIZER
6618 && modifier != EXPAND_SUM)
6619 constructor = validize_mem (constructor);
6625 /* Handle calls that pass values in multiple non-contiguous
6626 locations. The Irix 6 ABI has examples of this. */
6627 if (target == 0 || ! safe_from_p (target, exp, 1)
6628 || GET_CODE (target) == PARALLEL
6629 || modifier == EXPAND_STACK_PARM)
6631 = assign_temp (build_qualified_type (type,
6633 | (TREE_READONLY (exp)
6634 * TYPE_QUAL_CONST))),
6635 0, TREE_ADDRESSABLE (exp), 1);
6637 store_constructor (exp, target, 0, int_expr_size (exp));
6641 case MISALIGNED_INDIRECT_REF:
6642 case ALIGN_INDIRECT_REF:
6645 tree exp1 = TREE_OPERAND (exp, 0);
6648 if (code == MISALIGNED_INDIRECT_REF
6649 && !targetm.vectorize.misaligned_mem_ok (mode))
6652 if (modifier != EXPAND_WRITE)
6656 t = fold_read_from_constant_string (exp);
6658 return expand_expr (t, target, tmode, modifier);
6661 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6662 op0 = memory_address (mode, op0);
6664 if (code == ALIGN_INDIRECT_REF)
6666 int align = TYPE_ALIGN_UNIT (type);
6667 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6668 op0 = memory_address (mode, op0);
6671 temp = gen_rtx_MEM (mode, op0);
6673 orig = REF_ORIGINAL (exp);
6676 set_mem_attributes (temp, orig, 0);
6684 tree array = TREE_OPERAND (exp, 0);
6685 tree low_bound = array_ref_low_bound (exp);
6686 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6689 gcc_assert (TREE_CODE (TREE_TYPE (array)) == ARRAY_TYPE);
6691 /* Optimize the special-case of a zero lower bound.
6693 We convert the low_bound to sizetype to avoid some problems
6694 with constant folding. (E.g. suppose the lower bound is 1,
6695 and its mode is QI. Without the conversion, (ARRAY
6696 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6697 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6699 if (! integer_zerop (low_bound))
6700 index = size_diffop (index, convert (sizetype, low_bound));
6702 /* Fold an expression like: "foo"[2].
6703 This is not done in fold so it won't happen inside &.
6704 Don't fold if this is for wide characters since it's too
6705 difficult to do correctly and this is a very rare case. */
6707 if (modifier != EXPAND_CONST_ADDRESS
6708 && modifier != EXPAND_INITIALIZER
6709 && modifier != EXPAND_MEMORY)
6711 tree t = fold_read_from_constant_string (exp);
6714 return expand_expr (t, target, tmode, modifier);
6717 /* If this is a constant index into a constant array,
6718 just get the value from the array. Handle both the cases when
6719 we have an explicit constructor and when our operand is a variable
6720 that was declared const. */
6722 if (modifier != EXPAND_CONST_ADDRESS
6723 && modifier != EXPAND_INITIALIZER
6724 && modifier != EXPAND_MEMORY
6725 && TREE_CODE (array) == CONSTRUCTOR
6726 && ! TREE_SIDE_EFFECTS (array)
6727 && TREE_CODE (index) == INTEGER_CST
6728 && 0 > compare_tree_int (index,
6729 list_length (CONSTRUCTOR_ELTS
6730 (TREE_OPERAND (exp, 0)))))
6734 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6735 i = TREE_INT_CST_LOW (index);
6736 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6740 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6744 else if (optimize >= 1
6745 && modifier != EXPAND_CONST_ADDRESS
6746 && modifier != EXPAND_INITIALIZER
6747 && modifier != EXPAND_MEMORY
6748 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6749 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6750 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6751 && targetm.binds_local_p (array))
6753 if (TREE_CODE (index) == INTEGER_CST)
6755 tree init = DECL_INITIAL (array);
6757 if (TREE_CODE (init) == CONSTRUCTOR)
6761 for (elem = CONSTRUCTOR_ELTS (init);
6763 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6764 elem = TREE_CHAIN (elem))
6767 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6768 return expand_expr (fold (TREE_VALUE (elem)), target,
6771 else if (TREE_CODE (init) == STRING_CST
6772 && 0 > compare_tree_int (index,
6773 TREE_STRING_LENGTH (init)))
6775 tree type = TREE_TYPE (TREE_TYPE (init));
6776 enum machine_mode mode = TYPE_MODE (type);
6778 if (GET_MODE_CLASS (mode) == MODE_INT
6779 && GET_MODE_SIZE (mode) == 1)
6780 return gen_int_mode (TREE_STRING_POINTER (init)
6781 [TREE_INT_CST_LOW (index)], mode);
6786 goto normal_inner_ref;
6789 /* If the operand is a CONSTRUCTOR, we can just extract the
6790 appropriate field if it is present. */
6791 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6795 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6796 elt = TREE_CHAIN (elt))
6797 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6798 /* We can normally use the value of the field in the
6799 CONSTRUCTOR. However, if this is a bitfield in
6800 an integral mode that we can fit in a HOST_WIDE_INT,
6801 we must mask only the number of bits in the bitfield,
6802 since this is done implicitly by the constructor. If
6803 the bitfield does not meet either of those conditions,
6804 we can't do this optimization. */
6805 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6806 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6808 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6809 <= HOST_BITS_PER_WIDE_INT))))
6811 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6812 && modifier == EXPAND_STACK_PARM)
6814 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6815 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6817 HOST_WIDE_INT bitsize
6818 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6819 enum machine_mode imode
6820 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6822 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6824 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6825 op0 = expand_and (imode, op0, op1, target);
6830 = build_int_cst (NULL_TREE,
6831 GET_MODE_BITSIZE (imode) - bitsize);
6833 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6835 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6843 goto normal_inner_ref;
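/* An illustrative example (not from the original source): for a source
   fragment along the lines of

       struct s { int f : 3; } x = { v };
       ... x.f ...

   the loop above can pull V straight out of the CONSTRUCTOR; an unsigned
   bitfield is masked with (1 << 3) - 1, while a signed one is shifted
   left and arithmetically back right by GET_MODE_BITSIZE (imode) - 3 so
   the low three bits are properly sign-extended.  */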
6846 case ARRAY_RANGE_REF:
6849 enum machine_mode mode1;
6850 HOST_WIDE_INT bitsize, bitpos;
6853 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6854 &mode1, &unsignedp, &volatilep);
6857 /* If we got back the original object, something is wrong. Perhaps
6858 we are evaluating an expression too early. In any event, don't
6859 infinitely recurse. */
6860 gcc_assert (tem != exp);
6862 /* If TEM's type is a union of variable size, pass TARGET to the inner
6863 computation, since it will need a temporary and TARGET is known
6864 to suffice. This occurs in unchecked conversion in Ada. */
6868 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6869 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6871 && modifier != EXPAND_STACK_PARM
6872 ? target : NULL_RTX),
6874 (modifier == EXPAND_INITIALIZER
6875 || modifier == EXPAND_CONST_ADDRESS
6876 || modifier == EXPAND_STACK_PARM)
6877 ? modifier : EXPAND_NORMAL);
6879 /* If this is a constant, put it into a register if it is a
6880 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6881 if (CONSTANT_P (op0))
6883 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6884 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6886 op0 = force_reg (mode, op0);
6888 op0 = validize_mem (force_const_mem (mode, op0));
6891 /* Otherwise, if this object is not in memory and we either have an
6892 offset or a BLKmode result, put it there. This case can't occur in
6893 C, but can in Ada if we have unchecked conversion of an expression
6894 from a scalar type to an array or record type or for an
6895 ARRAY_RANGE_REF whose type is BLKmode. */
6896 else if (!MEM_P (op0)
6898 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6900 tree nt = build_qualified_type (TREE_TYPE (tem),
6901 (TYPE_QUALS (TREE_TYPE (tem))
6902 | TYPE_QUAL_CONST));
6903 rtx memloc = assign_temp (nt, 1, 1, 1);
6905 emit_move_insn (memloc, op0);
6911 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6914 gcc_assert (MEM_P (op0));
6916 #ifdef POINTERS_EXTEND_UNSIGNED
6917 if (GET_MODE (offset_rtx) != Pmode)
6918 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
6920 if (GET_MODE (offset_rtx) != ptr_mode)
6921 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6924 if (GET_MODE (op0) == BLKmode
6925 /* A constant address in OP0 can have VOIDmode; we must
6926 not try to call force_reg in that case. */
6927 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6929 && (bitpos % bitsize) == 0
6930 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6931 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
6933 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
6937 op0 = offset_address (op0, offset_rtx,
6938 highest_pow2_factor (offset));
6941 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
6942 record its alignment as BIGGEST_ALIGNMENT. */
6943 if (MEM_P (op0) && bitpos == 0 && offset != 0
6944 && is_aligning_offset (offset, tem))
6945 set_mem_align (op0, BIGGEST_ALIGNMENT);
6947 /* Don't forget about volatility even if this is a bitfield. */
6948 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
6950 if (op0 == orig_op0)
6951 op0 = copy_rtx (op0);
6953 MEM_VOLATILE_P (op0) = 1;
6956 /* The following code doesn't handle CONCAT.
6957 Assume only bitpos == 0 can be used for CONCAT, due to
6958 one-element arrays having the same mode as their element. */
6959 if (GET_CODE (op0) == CONCAT)
6961 gcc_assert (bitpos == 0
6962 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
6966 /* In cases where an aligned union has an unaligned object
6967 as a field, we might be extracting a BLKmode value from
6968 an integer-mode (e.g., SImode) object. Handle this case
6969 by doing the extract into an object as wide as the field
6970 (which we know to be the width of a basic mode), then
6971 storing into memory, and changing the mode to BLKmode. */
6972 if (mode1 == VOIDmode
6973 || REG_P (op0) || GET_CODE (op0) == SUBREG
6974 || (mode1 != BLKmode && ! direct_load[(int) mode1]
6975 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6976 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
6977 && modifier != EXPAND_CONST_ADDRESS
6978 && modifier != EXPAND_INITIALIZER)
6979 /* If the field isn't aligned enough to fetch as a memref,
6980 fetch it as a bit field. */
6981 || (mode1 != BLKmode
6982 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
6983 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
6985 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
6986 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
6987 && ((modifier == EXPAND_CONST_ADDRESS
6988 || modifier == EXPAND_INITIALIZER)
6990 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
6991 || (bitpos % BITS_PER_UNIT != 0)))
6992 /* If the type and the field are a constant size and the
6993 size of the type isn't the same size as the bitfield,
6994 we must use bitfield operations. */
6996 && TYPE_SIZE (TREE_TYPE (exp))
6997 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6998 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7001 enum machine_mode ext_mode = mode;
7003 if (ext_mode == BLKmode
7004 && ! (target != 0 && MEM_P (op0)
7006 && bitpos % BITS_PER_UNIT == 0))
7007 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7009 if (ext_mode == BLKmode)
7012 target = assign_temp (type, 0, 1, 1);
7017 /* In this case, BITPOS must start at a byte boundary and
7018 TARGET, if specified, must be a MEM. */
7019 gcc_assert (MEM_P (op0)
7020 && (!target || MEM_P (target))
7021 && !(bitpos % BITS_PER_UNIT));
7023 emit_block_move (target,
7024 adjust_address (op0, VOIDmode,
7025 bitpos / BITS_PER_UNIT),
7026 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7028 (modifier == EXPAND_STACK_PARM
7029 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7034 op0 = validize_mem (op0);
7036 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7037 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7039 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7040 (modifier == EXPAND_STACK_PARM
7041 ? NULL_RTX : target),
7042 ext_mode, ext_mode);
7044 /* If the result is a record type and BITSIZE is narrower than
7045 the mode of OP0, an integral mode, and this is a big endian
7046 machine, we must put the field into the high-order bits. */
7047 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7048 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7049 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7050 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7051 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7055 /* If the result type is BLKmode, store the data into a temporary
7056 of the appropriate type, but with the mode corresponding to the
7057 mode for the data we have (op0's mode). It's tempting to make
7058 this a constant type, since we know it's only being stored once,
7059 but that can cause problems if we are taking the address of this
7060 COMPONENT_REF because the MEM of any reference via that address
7061 will have flags corresponding to the type, which will not
7062 necessarily be constant. */
7063 if (mode == BLKmode)
7066 = assign_stack_temp_for_type
7067 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7069 emit_move_insn (new, op0);
7070 op0 = copy_rtx (new);
7071 PUT_MODE (op0, BLKmode);
7072 set_mem_attributes (op0, exp, 1);
7078 /* If the result is BLKmode, use that to access the object
7079 out of the pseudo, if any. */
7080 if (mode == BLKmode)
7081 mode1 = BLKmode;
7083 /* Get a reference to just this component. */
7084 if (modifier == EXPAND_CONST_ADDRESS
7085 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7086 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7088 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7090 if (op0 == orig_op0)
7091 op0 = copy_rtx (op0);
7093 set_mem_attributes (op0, exp, 0);
7094 if (REG_P (XEXP (op0, 0)))
7095 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7097 MEM_VOLATILE_P (op0) |= volatilep;
7098 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7099 || modifier == EXPAND_CONST_ADDRESS
7100 || modifier == EXPAND_INITIALIZER)
7102 else if (target == 0)
7103 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7105 convert_move (target, op0, unsignedp);
7110 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7113 /* Check for a built-in function. */
7114 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7115 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7117 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7119 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7120 == BUILT_IN_FRONTEND)
7121 return lang_hooks.expand_expr (exp, original_target,
7125 return expand_builtin (exp, target, subtarget, tmode, ignore);
7128 return expand_call (exp, target, ignore);
7130 case NON_LVALUE_EXPR:
7133 if (TREE_OPERAND (exp, 0) == error_mark_node)
7136 if (TREE_CODE (type) == UNION_TYPE)
7138 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7140 /* If both input and output are BLKmode, this conversion isn't doing
7141 anything except possibly changing memory attributes. */
7142 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7144 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7147 result = copy_rtx (result);
7148 set_mem_attributes (result, exp, 0);
7154 if (TYPE_MODE (type) != BLKmode)
7155 target = gen_reg_rtx (TYPE_MODE (type));
7157 target = assign_temp (type, 0, 1, 1);
7161 /* Store data into beginning of memory target. */
7162 store_expr (TREE_OPERAND (exp, 0),
7163 adjust_address (target, TYPE_MODE (valtype), 0),
7164 modifier == EXPAND_STACK_PARM);
7168 gcc_assert (REG_P (target));
7170 /* Store this field into a union of the proper type. */
7171 store_field (target,
7172 MIN ((int_size_in_bytes (TREE_TYPE
7173 (TREE_OPERAND (exp, 0)))
7175 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7176 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7180 /* Return the entire union. */
7184 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7186 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7189 /* If the signedness of the conversion differs and OP0 is
7190 a promoted SUBREG, clear that indication since we now
7191 have to do the proper extension. */
7192 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7193 && GET_CODE (op0) == SUBREG)
7194 SUBREG_PROMOTED_VAR_P (op0) = 0;
7196 return REDUCE_BIT_FIELD (op0);
7199 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7200 op0 = REDUCE_BIT_FIELD (op0);
7201 if (GET_MODE (op0) == mode)
7204 /* If OP0 is a constant, just convert it into the proper mode. */
7205 if (CONSTANT_P (op0))
7207 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7208 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7210 if (modifier == EXPAND_INITIALIZER)
7211 return simplify_gen_subreg (mode, op0, inner_mode,
7212 subreg_lowpart_offset (mode,
7215 return convert_modes (mode, inner_mode, op0,
7216 TYPE_UNSIGNED (inner_type));
7219 if (modifier == EXPAND_INITIALIZER)
7220 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7224 convert_to_mode (mode, op0,
7225 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7227 convert_move (target, op0,
7228 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7231 case VIEW_CONVERT_EXPR:
7232 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7234 /* If the input and output modes are both the same, we are done.
7235 Otherwise, if neither mode is BLKmode and both are integral and within
7236 a word, we can use gen_lowpart. If neither is true, make sure the
7237 operand is in memory and convert the MEM to the new mode. */
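/* For instance (illustrative, not from the original source): reinterpreting
   a float as a same-size int fails the integral-mode test below (SFmode is
   not MODE_INT), so the value is spilled to a stack temporary and re-read
   in the new mode, whereas a view between two narrow integer modes is
   handled in place with gen_lowpart.  */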
7238 if (TYPE_MODE (type) == GET_MODE (op0))
7240 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7241 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7242 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7243 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7244 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7245 op0 = gen_lowpart (TYPE_MODE (type), op0);
7246 else if (!MEM_P (op0))
7248 /* If the operand is not a MEM, force it into memory. Since we
7249 are going to be changing the mode of the MEM, don't call
7250 force_const_mem for constants because we don't allow pool
7251 constants to change mode. */
7252 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7254 gcc_assert (!TREE_ADDRESSABLE (exp));
7256 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7258 = assign_stack_temp_for_type
7259 (TYPE_MODE (inner_type),
7260 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7262 emit_move_insn (target, op0);
7266 /* At this point, OP0 is in the correct mode. If the output type is such
7267 that the operand is known to be aligned, indicate that it is.
7268 Otherwise, we need only be concerned about alignment for non-BLKmode
7269 results. */
7270 if (MEM_P (op0))
7272 op0 = copy_rtx (op0);
7274 if (TYPE_ALIGN_OK (type))
7275 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7276 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7277 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7279 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7280 HOST_WIDE_INT temp_size
7281 = MAX (int_size_in_bytes (inner_type),
7282 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7283 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7284 temp_size, 0, type);
7285 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7287 gcc_assert (!TREE_ADDRESSABLE (exp));
7289 if (GET_MODE (op0) == BLKmode)
7290 emit_block_move (new_with_op0_mode, op0,
7291 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7292 (modifier == EXPAND_STACK_PARM
7293 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7295 emit_move_insn (new_with_op0_mode, op0);
7300 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7306 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7307 something else, make sure we add the register to the constant and
7308 then to the other thing. This case can occur during strength
7309 reduction and doing it this way will produce better code if the
7310 frame pointer or argument pointer is eliminated.
7312 fold-const.c will ensure that the constant is always in the inner
7313 PLUS_EXPR, so the only case we need to do anything about is if
7314 sp, ap, or fp is our second argument, in which case we must swap
7315 the innermost first argument and our second argument. */
7317 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7318 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7319 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7320 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7321 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7322 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7324 tree t = TREE_OPERAND (exp, 1);
7326 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7327 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7330 /* If the result is to be ptr_mode and we are adding an integer to
7331 something, we might be forming a constant. So try to use
7332 plus_constant. If it produces a sum and we can't accept it,
7333 use force_operand. This allows P = &ARR[const] to generate
7334 efficient code on machines where a SYMBOL_REF is not a valid
7337 If this is an EXPAND_SUM call, always return the sum. */
7338 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7339 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7341 if (modifier == EXPAND_STACK_PARM)
7343 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7344 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7345 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7349 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7351 /* Use immed_double_const to ensure that the constant is
7352 truncated according to the mode of OP1, then sign extended
7353 to a HOST_WIDE_INT. Using the constant directly can result
7354 in non-canonical RTL in a 64x32 cross compile. */
7356 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7358 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7359 op1 = plus_constant (op1, INTVAL (constant_part));
7360 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7361 op1 = force_operand (op1, target);
7362 return REDUCE_BIT_FIELD (op1);
7365 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7366 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7367 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7371 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7372 (modifier == EXPAND_INITIALIZER
7373 ? EXPAND_INITIALIZER : EXPAND_SUM));
7374 if (! CONSTANT_P (op0))
7376 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7377 VOIDmode, modifier);
7378 /* Return a PLUS if modifier says it's OK. */
7379 if (modifier == EXPAND_SUM
7380 || modifier == EXPAND_INITIALIZER)
7381 return simplify_gen_binary (PLUS, mode, op0, op1);
7384 /* Use immed_double_const to ensure that the constant is
7385 truncated according to the mode of OP1, then sign extended
7386 to a HOST_WIDE_INT. Using the constant directly can result
7387 in non-canonical RTL in a 64x32 cross compile. */
7389 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7391 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7392 op0 = plus_constant (op0, INTVAL (constant_part));
7393 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7394 op0 = force_operand (op0, target);
7395 return REDUCE_BIT_FIELD (op0);
7399 /* No sense saving up arithmetic to be done
7400 if it's all in the wrong mode to form part of an address.
7401 And force_operand won't know whether to sign-extend or
7402 zero-extend. */
7403 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7404 || mode != ptr_mode)
7406 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7407 subtarget, &op0, &op1, 0);
7408 if (op0 == const0_rtx)
7410 if (op1 == const0_rtx)
7415 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7416 subtarget, &op0, &op1, modifier);
7417 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7420 /* For initializers, we are allowed to return a MINUS of two
7421 symbolic constants. Here we handle all cases when both operands
7422 are constant. */
7423 /* Handle difference of two symbolic constants,
7424 for the sake of an initializer. */
7425 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7426 && really_constant_p (TREE_OPERAND (exp, 0))
7427 && really_constant_p (TREE_OPERAND (exp, 1)))
7429 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7430 NULL_RTX, &op0, &op1, modifier);
7432 /* If the last operand is a CONST_INT, use plus_constant of
7433 the negated constant. Else make the MINUS. */
7434 if (GET_CODE (op1) == CONST_INT)
7435 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7437 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7440 /* No sense saving up arithmetic to be done
7441 if it's all in the wrong mode to form part of an address.
7442 And force_operand won't know whether to sign-extend or
7443 zero-extend. */
7444 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7445 || mode != ptr_mode)
7448 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7449 subtarget, &op0, &op1, modifier);
7451 /* Convert A - const to A + (-const). */
7452 if (GET_CODE (op1) == CONST_INT)
7454 op1 = negate_rtx (mode, op1);
7455 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7461 /* If first operand is constant, swap them.
7462 Thus the following special case checks need only
7463 check the second operand. */
7464 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7466 tree t1 = TREE_OPERAND (exp, 0);
7467 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7468 TREE_OPERAND (exp, 1) = t1;
7471 /* Attempt to return something suitable for generating an
7472 indexed address, for machines that support that. */
7474 if (modifier == EXPAND_SUM && mode == ptr_mode
7475 && host_integerp (TREE_OPERAND (exp, 1), 0))
7477 tree exp1 = TREE_OPERAND (exp, 1);
7479 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7483 op0 = force_operand (op0, NULL_RTX);
7485 op0 = copy_to_mode_reg (mode, op0);
7487 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7488 gen_int_mode (tree_low_cst (exp1, 0),
7489 TYPE_MODE (TREE_TYPE (exp1)))));
7492 if (modifier == EXPAND_STACK_PARM)
7495 /* Check for multiplying things that have been extended
7496 from a narrower type. If this machine supports multiplying
7497 in that narrower type with a result in the desired type,
7498 do it that way, and avoid the explicit type-conversion. */
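/* An illustrative example (not from the original source): for

       short a, b;
       int p = (int) a * (int) b;

   both multiplicands are conversions from HImode, so a machine providing
   a mulhisi3 pattern gets one 16x16->32 widening multiply instead of two
   extensions followed by a full SImode multiply.  */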
7499 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7500 && TREE_CODE (type) == INTEGER_TYPE
7501 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7502 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7503 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7504 && int_fits_type_p (TREE_OPERAND (exp, 1),
7505 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7506 /* Don't use a widening multiply if a shift will do. */
7507 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7508 > HOST_BITS_PER_WIDE_INT)
7509 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7511 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7512 && (TYPE_PRECISION (TREE_TYPE
7513 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7514 == TYPE_PRECISION (TREE_TYPE
7516 (TREE_OPERAND (exp, 0), 0))))
7517 /* If both operands are extended, they must either both
7518 be zero-extended or both be sign-extended. */
7519 && (TYPE_UNSIGNED (TREE_TYPE
7520 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7521 == TYPE_UNSIGNED (TREE_TYPE
7523 (TREE_OPERAND (exp, 0), 0)))))))
7525 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7526 enum machine_mode innermode = TYPE_MODE (op0type);
7527 bool zextend_p = TYPE_UNSIGNED (op0type);
7528 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7529 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7531 if (mode == GET_MODE_WIDER_MODE (innermode))
7533 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7535 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7536 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7537 TREE_OPERAND (exp, 1),
7538 NULL_RTX, &op0, &op1, 0);
7540 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7541 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7542 NULL_RTX, &op0, &op1, 0);
7545 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7546 && innermode == word_mode)
7549 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7550 NULL_RTX, VOIDmode, 0);
7551 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7552 op1 = convert_modes (innermode, mode,
7553 expand_expr (TREE_OPERAND (exp, 1),
7554 NULL_RTX, VOIDmode, 0),
7557 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7558 NULL_RTX, VOIDmode, 0);
7559 temp = expand_binop (mode, other_optab, op0, op1, target,
7560 unsignedp, OPTAB_LIB_WIDEN);
7561 hipart = gen_highpart (innermode, temp);
7562 htem = expand_mult_highpart_adjust (innermode, hipart,
7566 emit_move_insn (hipart, htem);
7567 return REDUCE_BIT_FIELD (temp);
7571 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7572 subtarget, &op0, &op1, 0);
7573 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7575 case TRUNC_DIV_EXPR:
7576 case FLOOR_DIV_EXPR:
7578 case ROUND_DIV_EXPR:
7579 case EXACT_DIV_EXPR:
7580 if (modifier == EXPAND_STACK_PARM)
7582 /* Possible optimization: compute the dividend with EXPAND_SUM
7583 then if the divisor is constant can optimize the case
7584 where some terms of the dividend have coeffs divisible by it. */
7585 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7586 subtarget, &op0, &op1, 0);
7587 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7590 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7591 saving an expensive divide. If not, combine will rebuild the original
7592 computation. */
7593 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7594 && TREE_CODE (type) == REAL_TYPE
7595 && !real_onep (TREE_OPERAND (exp, 0)))
7596 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7597 build2 (RDIV_EXPR, type,
7598 build_real (type, dconst1),
7599 TREE_OPERAND (exp, 1))),
7600 target, tmode, modifier);
7604 case TRUNC_MOD_EXPR:
7605 case FLOOR_MOD_EXPR:
7607 case ROUND_MOD_EXPR:
7608 if (modifier == EXPAND_STACK_PARM)
7610 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7611 subtarget, &op0, &op1, 0);
7612 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7614 case FIX_ROUND_EXPR:
7615 case FIX_FLOOR_EXPR:
7617 gcc_unreachable (); /* Not used for C. */
7619 case FIX_TRUNC_EXPR:
7620 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7621 if (target == 0 || modifier == EXPAND_STACK_PARM)
7622 target = gen_reg_rtx (mode);
7623 expand_fix (target, op0, unsignedp);
7627 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7628 if (target == 0 || modifier == EXPAND_STACK_PARM)
7629 target = gen_reg_rtx (mode);
7630 /* expand_float can't figure out what to do if FROM has VOIDmode.
7631 So give it the correct mode. With -O, cse will optimize this. */
7632 if (GET_MODE (op0) == VOIDmode)
7633 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7635 expand_float (target, op0,
7636 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7640 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7641 if (modifier == EXPAND_STACK_PARM)
7643 temp = expand_unop (mode,
7644 optab_for_tree_code (NEGATE_EXPR, type),
7647 return REDUCE_BIT_FIELD (temp);
7650 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7651 if (modifier == EXPAND_STACK_PARM)
7654 /* ABS_EXPR is not valid for complex arguments. */
7655 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7656 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7658 /* Unsigned abs is simply the operand. Testing here means we don't
7659 risk generating incorrect code below. */
7660 if (TYPE_UNSIGNED (type))
7663 return expand_abs (mode, op0, target, unsignedp,
7664 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7668 target = original_target;
7670 || modifier == EXPAND_STACK_PARM
7671 || (MEM_P (target) && MEM_VOLATILE_P (target))
7672 || GET_MODE (target) != mode
7674 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7675 target = gen_reg_rtx (mode);
7676 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7677 target, &op0, &op1, 0);
7679 /* First try to do it with a special MIN or MAX instruction.
7680 If that does not win, use a conditional jump to select the proper
7681 value. */
7682 this_optab = optab_for_tree_code (code, type);
7683 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7688 /* At this point, a MEM target is no longer useful; we will get better
7689 code without it. */
7692 target = gen_reg_rtx (mode);
7694 /* If op1 was placed in target, swap op0 and op1. */
7695 if (target != op0 && target == op1)
7703 emit_move_insn (target, op0);
7705 op0 = gen_label_rtx ();
7707 /* If this mode is an integer too wide to compare properly,
7708 compare word by word. Rely on cse to optimize constant cases. */
7709 if (GET_MODE_CLASS (mode) == MODE_INT
7710 && ! can_compare_p (GE, mode, ccp_jump))
7712 if (code == MAX_EXPR)
7713 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7716 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7721 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7722 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7724 emit_move_insn (target, op1);
7729 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7730 if (modifier == EXPAND_STACK_PARM)
7732 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7736 /* ??? Can optimize bitwise operations with one arg constant.
7737 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7738 and (a bitwise1 b) bitwise2 b (etc)
7739 but that is probably not worthwhile. */
7741 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7742 boolean values when we want in all cases to compute both of them. In
7743 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7744 as actual zero-or-1 values and then bitwise anding. In cases where
7745 there cannot be any side effects, better code would be made by
7746 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7747 how to recognize those cases. */
7749 case TRUTH_AND_EXPR:
7750 code = BIT_AND_EXPR;
7755 code = BIT_IOR_EXPR;
7759 case TRUTH_XOR_EXPR:
7760 code = BIT_XOR_EXPR;
7768 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7770 if (modifier == EXPAND_STACK_PARM)
7772 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7773 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7776 /* Could determine the answer when only additive constants differ. Also,
7777 the addition of one can be handled by changing the condition. */
7784 case UNORDERED_EXPR:
7792 temp = do_store_flag (exp,
7793 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7794 tmode != VOIDmode ? tmode : mode, 0);
7798 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7799 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7801 && REG_P (original_target)
7802 && (GET_MODE (original_target)
7803 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7805 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7808 /* If temp is constant, we can just compute the result. */
7809 if (GET_CODE (temp) == CONST_INT)
7811 if (INTVAL (temp) != 0)
7812 emit_move_insn (target, const1_rtx);
7814 emit_move_insn (target, const0_rtx);
7819 if (temp != original_target)
7821 enum machine_mode mode1 = GET_MODE (temp);
7822 if (mode1 == VOIDmode)
7823 mode1 = tmode != VOIDmode ? tmode : mode;
7825 temp = copy_to_mode_reg (mode1, temp);
7828 op1 = gen_label_rtx ();
7829 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7830 GET_MODE (temp), unsignedp, op1);
7831 emit_move_insn (temp, const1_rtx);
7836 /* If no set-flag instruction, must generate a conditional store
7837 into a temporary variable. Drop through and handle this
7838 like && and ||. */
7840 if (! ignore
7841 && (target == 0
7842 || modifier == EXPAND_STACK_PARM
7843 || ! safe_from_p (target, exp, 1)
7844 /* Make sure we don't have a hard reg (such as function's return
7845 value) live across basic blocks, if not optimizing. */
7846 || (!optimize && REG_P (target)
7847 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7848 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7851 emit_move_insn (target, const0_rtx);
7853 op1 = gen_label_rtx ();
7854 jumpifnot (exp, op1);
7857 emit_move_insn (target, const1_rtx);
7860 return ignore ? const0_rtx : target;
7862 case TRUTH_NOT_EXPR:
7863 if (modifier == EXPAND_STACK_PARM)
7865 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7866 /* The parser is careful to generate TRUTH_NOT_EXPR
7867 only with operands that are always zero or one. */
7868 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7869 target, 1, OPTAB_LIB_WIDEN);
7873 case STATEMENT_LIST:
7875 tree_stmt_iterator iter;
7877 gcc_assert (ignore);
7879 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7880 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7885 /* If it's void, we don't need to worry about computing a value. */
7886 if (VOID_TYPE_P (TREE_TYPE (exp)))
7888 tree pred = TREE_OPERAND (exp, 0);
7889 tree then_ = TREE_OPERAND (exp, 1);
7890 tree else_ = TREE_OPERAND (exp, 2);
7892 gcc_assert (TREE_CODE (then_) == GOTO_EXPR
7893 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
7894 && TREE_CODE (else_) == GOTO_EXPR
7895 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
7897 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7898 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7901 /* Note that COND_EXPRs whose type is a structure or union
7902 are required to be constructed to contain assignments of
7903 a temporary variable, so that we can evaluate them here
7904 for side effect only. If type is void, we must do likewise. */
7906 gcc_assert (!TREE_ADDRESSABLE (type)
7908 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7909 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7911 /* If we are not to produce a result, we have no target. Otherwise,
7912 if a target was specified use it; it will not be used as an
7913 intermediate target unless it is safe. If no target, use a
7914 temporary. */
7916 if (modifier != EXPAND_STACK_PARM
7918 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7919 && GET_MODE (original_target) == mode
7920 #ifdef HAVE_conditional_move
7921 && (! can_conditionally_move_p (mode)
7922 || REG_P (original_target))
7924 && !MEM_P (original_target))
7925 temp = original_target;
7927 temp = assign_temp (type, 0, 0, 1);
7929 do_pending_stack_adjust ();
7931 op0 = gen_label_rtx ();
7932 op1 = gen_label_rtx ();
7933 jumpifnot (TREE_OPERAND (exp, 0), op0);
7934 store_expr (TREE_OPERAND (exp, 1), temp,
7935 modifier == EXPAND_STACK_PARM);
7937 emit_jump_insn (gen_jump (op1));
7940 store_expr (TREE_OPERAND (exp, 2), temp,
7941 modifier == EXPAND_STACK_PARM);
7948 target = expand_vec_cond_expr (exp, target);
7953 tree lhs = TREE_OPERAND (exp, 0);
7954 tree rhs = TREE_OPERAND (exp, 1);
7956 gcc_assert (ignore);
7958 /* Check for |= or &= of a bitfield of size one into another bitfield
7959 of size 1. In this case, (unless we need the result of the
7960 assignment) we can do this more efficiently with a
7961 test followed by an assignment, if necessary.
7963 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7964 things change so we do, this code should be enhanced to
7965 support it. */
7966 if (TREE_CODE (lhs) == COMPONENT_REF
7967 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7968 || TREE_CODE (rhs) == BIT_AND_EXPR)
7969 && TREE_OPERAND (rhs, 0) == lhs
7970 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7971 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
7972 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
7974 rtx label = gen_label_rtx ();
7976 do_jump (TREE_OPERAND (rhs, 1),
7977 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7978 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7979 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7980 (TREE_CODE (rhs) == BIT_IOR_EXPR
7982 : integer_zero_node)));
7983 do_pending_stack_adjust ();
7988 expand_assignment (lhs, rhs);
7994 if (!TREE_OPERAND (exp, 0))
7995 expand_null_return ();
7997 expand_return (TREE_OPERAND (exp, 0));
8001 return expand_expr_addr_expr (exp, target, tmode, modifier);
8003 /* COMPLEX type for Extended Pascal & Fortran */
8006 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8009 /* Get the rtx code of the operands. */
8010 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8011 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8014 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8018 /* Move the real (op0) and imaginary (op1) parts to their location. */
8019 emit_move_insn (gen_realpart (mode, target), op0);
8020 emit_move_insn (gen_imagpart (mode, target), op1);
8022 insns = get_insns ();
8025 /* Complex construction should appear as a single unit. */
8026 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8027 each with a separate pseudo as destination.
8028 It's not correct for flow to treat them as a unit. */
8029 if (GET_CODE (target) != CONCAT)
8030 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
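/* An illustrative sketch (not from the original source): for
   "__complex__ double z = x + y * 1.0i;" the two moves above become
   RD = X and ID = Y; when TARGET is not a CONCAT they are wrapped by
   emit_no_conflict_block so flow sees the pair as one initialization.  */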
8038 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8039 return gen_realpart (mode, op0);
8042 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8043 return gen_imagpart (mode, op0);
8046 expand_resx_expr (exp);
8049 case TRY_CATCH_EXPR:
8051 case EH_FILTER_EXPR:
8052 case TRY_FINALLY_EXPR:
8053 /* Lowered by tree-eh.c. */
8056 case WITH_CLEANUP_EXPR:
8057 case CLEANUP_POINT_EXPR:
8059 case CASE_LABEL_EXPR:
8065 case PREINCREMENT_EXPR:
8066 case PREDECREMENT_EXPR:
8067 case POSTINCREMENT_EXPR:
8068 case POSTDECREMENT_EXPR:
8071 case LABELED_BLOCK_EXPR:
8072 case EXIT_BLOCK_EXPR:
8073 case TRUTH_ANDIF_EXPR:
8074 case TRUTH_ORIF_EXPR:
8075 /* Lowered by gimplify.c. */
8079 return get_exception_pointer (cfun);
8082 return get_exception_filter (cfun);
8085 /* Function descriptors are not valid except as
8086 initialization constants, and should not be expanded. */
8094 expand_label (TREE_OPERAND (exp, 0));
8098 expand_asm_expr (exp);
8101 case WITH_SIZE_EXPR:
8102 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8103 have pulled out the size to use in whatever context it needed. */
8104 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
8107 case REALIGN_LOAD_EXPR:
8109 tree oprnd0 = TREE_OPERAND (exp, 0);
8110 tree oprnd1 = TREE_OPERAND (exp, 1);
8111 tree oprnd2 = TREE_OPERAND (exp, 2);
8114 this_optab = optab_for_tree_code (code, type);
8115 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
8116 op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
8117 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
8126 return lang_hooks.expand_expr (exp, original_target, tmode,
8130 /* Here to do an ordinary binary operator. */
8132 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8133 subtarget, &op0, &op1, 0);
8135 this_optab = optab_for_tree_code (code, type);
8137 if (modifier == EXPAND_STACK_PARM)
8139 temp = expand_binop (mode, this_optab, op0, op1, target,
8140 unsignedp, OPTAB_LIB_WIDEN);
8142 return REDUCE_BIT_FIELD (temp);
8144 #undef REDUCE_BIT_FIELD
8146 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8147 signedness of TYPE), possibly returning the result in TARGET. */
8148 static rtx
8149 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8151 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8152 if (target && GET_MODE (target) != GET_MODE (exp))
8153 target = 0;
8154 if (TYPE_UNSIGNED (type))
8155 {
8156 rtx mask;
8157 if (prec < HOST_BITS_PER_WIDE_INT)
8158 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8159 GET_MODE (exp));
8160 else
8161 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8162 ((unsigned HOST_WIDE_INT) 1
8163 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8164 GET_MODE (exp));
8165 return expand_and (GET_MODE (exp), exp, mask, target);
8166 }
8167 else
8168 {
8169 tree count = build_int_cst (NULL_TREE,
8170 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
8171 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8172 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
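/* Worked example: for a 3-bit bit-field type held in QImode, the
   unsigned branch masks with (1 << 3) - 1 == 7, while the signed
   branch shifts left by 8 - 3 == 5 and then (arithmetically) back
   right by 5, smearing bit 2 across the upper bits as a sign bit.  */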
8176 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that,
8177 when applied to the address of EXP, produces an address known to be
8178 aligned to more than BIGGEST_ALIGNMENT. */
8180 static int
8181 is_aligning_offset (tree offset, tree exp)
8183 /* Strip off any conversions. */
8184 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8185 || TREE_CODE (offset) == NOP_EXPR
8186 || TREE_CODE (offset) == CONVERT_EXPR)
8187 offset = TREE_OPERAND (offset, 0);
8189 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8190 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8191 if (TREE_CODE (offset) != BIT_AND_EXPR
8192 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8193 || compare_tree_int (TREE_OPERAND (offset, 1),
8194 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8195 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8196 return 0;
8198 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8199 It must be NEGATE_EXPR. Then strip any more conversions. */
8200 offset = TREE_OPERAND (offset, 0);
8201 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8202 || TREE_CODE (offset) == NOP_EXPR
8203 || TREE_CODE (offset) == CONVERT_EXPR)
8204 offset = TREE_OPERAND (offset, 0);
8206 if (TREE_CODE (offset) != NEGATE_EXPR)
8207 return 0;
8209 offset = TREE_OPERAND (offset, 0);
8210 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8211 || TREE_CODE (offset) == NOP_EXPR
8212 || TREE_CODE (offset) == CONVERT_EXPR)
8213 offset = TREE_OPERAND (offset, 0);
8215 /* This must now be the address of EXP. */
8216 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
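/* The pattern accepted above is, roughly, the tree for
       (- (intptr_t) &exp) & (align - 1)
   i.e. the offset that rounds the address of EXP up to a power-of-2
   alignment boundary larger than BIGGEST_ALIGNMENT.  */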
8219 /* Return the tree node if ARG corresponds to a string constant; return
8220 zero if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
8221 in bytes within the string that ARG is accessing. The type of the
8222 offset will be `sizetype'. */
8224 static tree
8225 string_constant (tree arg, tree *ptr_offset)
8227 tree array, offset;
8229 STRIP_NOPS (arg);
8230 if (TREE_CODE (arg) == ADDR_EXPR)
8232 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8234 *ptr_offset = size_zero_node;
8235 return TREE_OPERAND (arg, 0);
8237 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
8239 array = TREE_OPERAND (arg, 0);
8240 offset = size_zero_node;
8242 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
8244 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
8245 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
8246 if (TREE_CODE (array) != STRING_CST
8247 && TREE_CODE (array) != VAR_DECL)
8248 return 0;
8250 else
8251 return 0;
8253 else if (TREE_CODE (arg) == PLUS_EXPR)
8255 tree arg0 = TREE_OPERAND (arg, 0);
8256 tree arg1 = TREE_OPERAND (arg, 1);
8258 STRIP_NOPS (arg0);
8259 STRIP_NOPS (arg1);
8261 if (TREE_CODE (arg0) == ADDR_EXPR
8262 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
8263 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
8265 array = TREE_OPERAND (arg0, 0);
8266 offset = arg1;
8268 else if (TREE_CODE (arg1) == ADDR_EXPR
8269 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
8270 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
8272 array = TREE_OPERAND (arg1, 0);
8273 offset = arg0;
8281 if (TREE_CODE (array) == STRING_CST)
8283 *ptr_offset = convert (sizetype, offset);
8284 return array;
8286 else if (TREE_CODE (array) == VAR_DECL)
8288 int length;
8290 /* Variables initialized to string literals can be handled too. */
8291 if (DECL_INITIAL (array) == NULL_TREE
8292 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
8293 return 0;
8295 /* Only handle arrays that are read-only, non-volatile and bind locally. */
8296 if (! TREE_READONLY (array)
8297 || TREE_SIDE_EFFECTS (array)
8298 || ! targetm.binds_local_p (array))
8299 return 0;
8301 /* Avoid const char foo[4] = "abcde"; */
8302 if (DECL_SIZE_UNIT (array) == NULL_TREE
8303 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
8304 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
8305 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
8306 return 0;
8308 /* If the variable is bigger than the string literal, OFFSET must be
8309 constant and within the bounds of the string literal. */
8310 offset = convert (sizetype, offset);
8311 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
8312 && (! host_integerp (offset, 1)
8313 || compare_tree_int (offset, length) >= 0)
8314 return 0;
8316 *ptr_offset = offset;
8317 return DECL_INITIAL (array);
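/* For example, given the tree for "hello" + 2 this returns the
   STRING_CST "hello" with *PTR_OFFSET set to 2; given &buf[3], where
   BUF is a read-only local array initialized from a string literal,
   it returns that literal with *PTR_OFFSET set to 3, provided the
   checks above succeed.  */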
8323 /* Generate code to calculate EXP using a store-flag instruction
8324 and return an rtx for the result. EXP is either a comparison
8325 or a TRUTH_NOT_EXPR whose operand is a comparison.
8327 If TARGET is nonzero, store the result there if convenient.
8329 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
8330 cheap.
8332 Return zero if there is no suitable set-flag instruction
8333 available on this machine.
8335 Once expand_expr has been called on the arguments of the comparison,
8336 we are committed to doing the store flag, since it is not safe to
8337 re-evaluate the expression. We emit the store-flag insn by calling
8338 emit_store_flag, but only expand the arguments if we have a reason
8339 to believe that emit_store_flag will be successful. If we think that
8340 it will, but it isn't, we have to simulate the store-flag with a
8341 set/jump/set sequence. */
8343 static rtx
8344 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
8346 enum rtx_code code;
8347 tree arg0, arg1, type;
8348 tree tem;
8349 enum machine_mode operand_mode;
8350 int invert = 0;
8351 int unsignedp;
8352 rtx op0, op1;
8353 enum insn_code icode;
8354 rtx subtarget = target;
8355 rtx result, label;
8357 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8358 result at the end. We can't simply invert the test since it would
8359 have already been inverted if it were valid. This case occurs for
8360 some floating-point comparisons. */
8362 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8363 invert = 1, exp = TREE_OPERAND (exp, 0);
8365 arg0 = TREE_OPERAND (exp, 0);
8366 arg1 = TREE_OPERAND (exp, 1);
8368 /* Don't crash if the comparison was erroneous. */
8369 if (arg0 == error_mark_node || arg1 == error_mark_node)
8370 return const0_rtx;
8372 type = TREE_TYPE (arg0);
8373 operand_mode = TYPE_MODE (type);
8374 unsignedp = TYPE_UNSIGNED (type);
8376 /* We won't bother with BLKmode store-flag operations because it would mean
8377 passing a lot of information to emit_store_flag. */
8378 if (operand_mode == BLKmode)
8379 return 0;
8381 /* We won't bother with store-flag operations involving function pointers
8382 when function pointers must be canonicalized before comparisons. */
8383 #ifdef HAVE_canonicalize_funcptr_for_compare
8384 if (HAVE_canonicalize_funcptr_for_compare
8385 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
8386 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8387 == FUNCTION_TYPE)
8388 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
8389 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8390 == FUNCTION_TYPE))))
8391 return 0;
8392 #endif
8397 /* Get the rtx comparison code to use. We know that EXP is a comparison
8398 operation of some type. Some comparisons against 1 and -1 can be
8399 converted to comparisons with zero. Do so here so that the tests
8400 below will be aware that we have a comparison with zero. These
8401 tests will not catch constants in the first operand, but constants
8402 are rarely passed as the first operand. */
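/* For example, x < 1 becomes x <= 0 and x >= 1 becomes x > 0 for both
   signednesses, while the conversions of comparisons against -1
   (x <= -1 into x < 0, x > -1 into x >= 0) apply only to signed
   operands.  */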
8404 switch (TREE_CODE (exp))
8405 {
8406 case EQ_EXPR:
8407 code = EQ;
8408 break;
8409 case NE_EXPR:
8410 code = NE;
8411 break;
8412 case LT_EXPR:
8413 if (integer_onep (arg1))
8414 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8415 else
8416 code = unsignedp ? LTU : LT;
8417 break;
8418 case LE_EXPR:
8419 if (! unsignedp && integer_all_onesp (arg1))
8420 arg1 = integer_zero_node, code = LT;
8421 else
8422 code = unsignedp ? LEU : LE;
8423 break;
8424 case GT_EXPR:
8425 if (! unsignedp && integer_all_onesp (arg1))
8426 arg1 = integer_zero_node, code = GE;
8427 else
8428 code = unsignedp ? GTU : GT;
8429 break;
8430 case GE_EXPR:
8431 if (integer_onep (arg1))
8432 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8433 else
8434 code = unsignedp ? GEU : GE;
8435 break;
8437 case UNORDERED_EXPR:
8466 /* Put a constant second. */
8467 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
8469 tem = arg0; arg0 = arg1; arg1 = tem;
8470 code = swap_condition (code);
8473 /* If this is an equality or inequality test of a single bit, we can
8474 do this by shifting the bit being tested to the low-order bit and
8475 masking the result with the constant 1. If the condition was EQ,
8476 we xor it with 1. This does not require an scc insn and is faster
8477 than an scc insn even if we have it.
8479 The code to make this transformation was moved into fold_single_bit_test,
8480 so we just call into the folder and expand its result. */
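/* For instance, (x & 8) != 0 is folded to (x >> 3) & 1, and the EQ
   form gets a final XOR with 1: (x & 8) == 0 becomes
   ((x >> 3) & 1) ^ 1.  */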
8482 if ((code == NE || code == EQ)
8483 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8484 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8486 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
8487 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
8488 arg0, arg1, type),
8489 target, VOIDmode, EXPAND_NORMAL);
8492 /* Now see if we are likely to be able to do this. Return if not. */
8493 if (! can_compare_p (code, operand_mode, ccp_store_flag))
8494 return 0;
8496 icode = setcc_gen_code[(int) code];
8497 if (icode == CODE_FOR_nothing
8498 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
8500 /* We can only do this if it is one of the special cases that
8501 can be handled without an scc insn. */
8502 if ((code == LT && integer_zerop (arg1))
8503 || (! only_cheap && code == GE && integer_zerop (arg1)))
8504 ;
8505 else if (BRANCH_COST >= 0
8506 && ! only_cheap && (code == NE || code == EQ)
8507 && TREE_CODE (type) != REAL_TYPE
8508 && ((abs_optab->handlers[(int) operand_mode].insn_code
8509 != CODE_FOR_nothing)
8510 || (ffs_optab->handlers[(int) operand_mode].insn_code
8511 != CODE_FOR_nothing)))
8512 ;
8513 else
8514 return 0;
8517 if (! get_subtarget (target)
8518 || GET_MODE (subtarget) != operand_mode)
8519 subtarget = 0;
8521 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
8523 if (target == 0)
8524 target = gen_reg_rtx (mode);
8526 result = emit_store_flag (target, code, op0, op1,
8527 operand_mode, unsignedp, 1);
8529 if (result)
8530 {
8531 if (invert)
8532 result = expand_binop (mode, xor_optab, result, const1_rtx,
8533 result, 0, OPTAB_LIB_WIDEN);
8534 return result;
8535 }
8537 /* If this failed, we have to do this with set/compare/jump/set code. */
8538 if (!REG_P (target)
8539 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8540 target = gen_reg_rtx (GET_MODE (target));
8542 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8543 result = compare_from_rtx (op0, op1, code, unsignedp,
8544 operand_mode, NULL_RTX);
8545 if (GET_CODE (result) == CONST_INT)
8546 return (((result == const0_rtx && ! invert)
8547 || (result != const0_rtx && invert))
8548 ? const0_rtx : const1_rtx);
8550 /* The code of RESULT may not match CODE if compare_from_rtx
8551 decided to swap its operands and reverse the original code.
8553 We know that compare_from_rtx returns either a CONST_INT or
8554 a new comparison code, so it is safe to just extract the
8555 code from RESULT. */
8556 code = GET_CODE (result);
8558 label = gen_label_rtx ();
8559 gcc_assert (bcc_gen_fctn[(int) code]);
8561 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
8562 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
8563 emit_label (label);
8565 return target;
8569 /* Stubs in case we haven't got a casesi insn. */
8570 #ifndef HAVE_casesi
8571 # define HAVE_casesi 0
8572 # define gen_casesi(a, b, c, d, e) (0)
8573 # define CODE_FOR_casesi CODE_FOR_nothing
8574 #endif
8576 /* If the machine does not have a case insn that compares the bounds,
8577 this means extra overhead for dispatch tables, which raises the
8578 threshold for using them. */
8579 #ifndef CASE_VALUES_THRESHOLD
8580 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
8581 #endif /* CASE_VALUES_THRESHOLD */
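/* For example, with a casesi pattern a switch statement needs at
   least 4 distinct case values before a dispatch table is used;
   without one, at least 5.  */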
8583 unsigned int
8584 case_values_threshold (void)
8586 return CASE_VALUES_THRESHOLD;
8589 /* Attempt to generate a casesi instruction. Returns 1 if successful,
8590 0 otherwise (i.e. if there is no casesi instruction). */
8591 int
8592 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
8593 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
8595 enum machine_mode index_mode = SImode;
8596 int index_bits = GET_MODE_BITSIZE (index_mode);
8597 rtx op1, op2, index;
8598 enum machine_mode op_mode;
8600 if (! HAVE_casesi)
8601 return 0;
8603 /* Convert the index to SImode. */
8604 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
8606 enum machine_mode omode = TYPE_MODE (index_type);
8607 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
8609 /* We must handle the endpoints in the original mode. */
8610 index_expr = build2 (MINUS_EXPR, index_type,
8611 index_expr, minval);
8612 minval = integer_zero_node;
8613 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8614 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
8615 omode, 1, default_label);
8616 /* Now we can safely truncate. */
8617 index = convert_to_mode (index_mode, index, 0);
8620 else
8621 if (TYPE_MODE (index_type) != index_mode)
8623 index_expr = convert (lang_hooks.types.type_for_size
8624 (index_bits, 0), index_expr);
8625 index_type = TREE_TYPE (index_expr);
8628 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8631 do_pending_stack_adjust ();
8633 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
8634 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
8635 (index, op_mode))
8636 index = copy_to_mode_reg (op_mode, index);
8638 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
8640 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
8641 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
8642 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
8643 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
8644 (op1, op_mode))
8645 op1 = copy_to_mode_reg (op_mode, op1);
8647 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
8649 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
8650 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
8651 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
8652 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
8653 (op2, op_mode))
8654 op2 = copy_to_mode_reg (op_mode, op2);
8656 emit_jump_insn (gen_casesi (index, op1, op2,
8657 table_label, default_label));
8658 return 1;
8661 /* Attempt to generate a tablejump instruction; same concept. */
8662 #ifndef HAVE_tablejump
8663 #define HAVE_tablejump 0
8664 #define gen_tablejump(x, y) (0)
8665 #endif
8667 /* Subroutine of the next function.
8669 INDEX is the value being switched on, with the lowest value
8670 in the table already subtracted.
8671 MODE is its expected mode (needed if INDEX is constant).
8672 RANGE is the length of the jump table.
8673 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8675 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8676 index value is out of range. */
8678 static void
8679 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
8680 rtx default_label)
8682 rtx temp, vector;
8684 if (INTVAL (range) > cfun->max_jumptable_ents)
8685 cfun->max_jumptable_ents = INTVAL (range);
8687 /* Do an unsigned comparison (in the proper mode) between the index
8688 expression and the value which represents the length of the range.
8689 Since we just finished subtracting the lower bound of the range
8690 from the index expression, this comparison allows us to simultaneously
8691 check that the original index expression value is both greater than
8692 or equal to the minimum value of the range and less than or equal to
8693 the maximum value of the range. */
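/* Numeric example: for case values 5 through 10, the caller passes
   INDEX = i - 5 and RANGE = 5, and the single unsigned test
   (unsigned) (i - 5) > 5 rejects both i < 5 (which wraps to a huge
   unsigned value) and i > 10.  */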
8695 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
8696 default_label);
8698 /* If index is in range, it must fit in Pmode.
8699 Convert to Pmode so we can index with it. */
8700 if (mode != Pmode)
8701 index = convert_to_mode (Pmode, index, 1);
8703 /* Don't let a MEM slip through, because then INDEX that comes
8704 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
8705 and break_out_memory_refs will go to work on it and mess it up. */
8706 #ifdef PIC_CASE_VECTOR_ADDRESS
8707 if (flag_pic && !REG_P (index))
8708 index = copy_to_mode_reg (Pmode, index);
8709 #endif
8711 /* If flag_force_addr were to affect this address
8712 it could interfere with the tricky assumptions made
8713 about addresses that contain label-refs,
8714 which may be valid only very near the tablejump itself. */
8715 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8716 GET_MODE_SIZE, because this indicates how large insns are. The other
8717 uses should all be Pmode, because they are addresses. This code
8718 could fail if addresses and insns are not the same size. */
8719 index = gen_rtx_PLUS (Pmode,
8720 gen_rtx_MULT (Pmode, index,
8721 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8722 gen_rtx_LABEL_REF (Pmode, table_label));
8723 #ifdef PIC_CASE_VECTOR_ADDRESS
8724 if (flag_pic)
8725 index = PIC_CASE_VECTOR_ADDRESS (index);
8726 else
8727 #endif
8728 index = memory_address_noforce (CASE_VECTOR_MODE, index);
8729 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8730 vector = gen_const_mem (CASE_VECTOR_MODE, index);
8731 convert_move (temp, vector, 0);
8733 emit_jump_insn (gen_tablejump (temp, table_label));
8735 /* If we are generating PIC code or if the table is PC-relative, the
8736 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8737 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
8738 emit_barrier ();
8741 int
8742 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
8743 rtx table_label, rtx default_label)
8745 rtx index;
8747 if (! HAVE_tablejump)
8748 return 0;
8750 index_expr = fold (build2 (MINUS_EXPR, index_type,
8751 convert (index_type, index_expr),
8752 convert (index_type, minval)));
8753 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
8754 do_pending_stack_adjust ();
8756 do_tablejump (index, TYPE_MODE (index_type),
8757 convert_modes (TYPE_MODE (index_type),
8758 TYPE_MODE (TREE_TYPE (range)),
8759 expand_expr (range, NULL_RTX,
8760 VOIDmode, 0),
8761 TYPE_UNSIGNED (TREE_TYPE (range))),
8762 table_label, default_label);
8763 return 1;
8766 /* Nonzero if the mode is a valid vector mode for this architecture.
8767 This returns nonzero even if there is no hardware support for the
8768 vector mode, but we can emulate with narrower modes. */
8770 int
8771 vector_mode_valid_p (enum machine_mode mode)
8773 enum mode_class class = GET_MODE_CLASS (mode);
8774 enum machine_mode innermode;
8776 /* Doh! What's going on? */
8777 if (class != MODE_VECTOR_INT
8778 && class != MODE_VECTOR_FLOAT)
8779 return 0;
8781 /* Hardware support. Woo hoo! */
8782 if (targetm.vector_mode_supported_p (mode))
8783 return 1;
8785 innermode = GET_MODE_INNER (mode);
8787 /* We should probably return 1 if requesting V4DI and we have no DI,
8788 but do have V2DI; however, that case is probably very unlikely. */
8790 /* If we have support for the inner mode, we can safely emulate it.
8791 We may not have V2DI, but we can emulate with a pair of DIs. */
8792 return targetm.scalar_mode_supported_p (innermode);
8795 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
8796 static rtx
8797 const_vector_from_tree (tree exp)
8798 {
8799 rtvec v;
8800 int units, i;
8801 tree link, elt;
8802 enum machine_mode inner, mode;
8804 mode = TYPE_MODE (TREE_TYPE (exp));
8806 if (initializer_zerop (exp))
8807 return CONST0_RTX (mode);
8809 units = GET_MODE_NUNITS (mode);
8810 inner = GET_MODE_INNER (mode);
8812 v = rtvec_alloc (units);
8814 link = TREE_VECTOR_CST_ELTS (exp);
8815 for (i = 0; link; link = TREE_CHAIN (link), ++i)
8817 elt = TREE_VALUE (link);
8819 if (TREE_CODE (elt) == REAL_CST)
8820 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
8821 inner);
8822 else
8823 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
8824 TREE_INT_CST_HIGH (elt),
8825 inner);
8828 /* Initialize remaining elements to 0. */
8829 for (; i < units; ++i)
8830 RTVEC_ELT (v, i) = CONST0_RTX (inner);
8832 return gen_rtx_CONST_VECTOR (mode, v);
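/* For example, a VECTOR_CST of V4SImode whose element list is {1, 2}
   becomes (const_vector:V4SI [1 2 0 0]): the listed elements are
   converted above and the remaining ones are zero-filled.  */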
8834 #include "gt-expr.h"