/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "recog.h"
#include "typeclass.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, tree, int);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
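
/* Illustrative sketch (not part of the original file): how the
   *_BY_PIECES_P heuristics trade piece moves against a library call.
   The concrete numbers assume a hypothetical target with
   MOVE_MAX_PIECES == 8 and MOVE_RATIO == 15: a 16-byte copy with
   64-bit alignment needs 16/8 == 2 piece moves, and 2 < 15, so the
   copy would be expanded inline rather than calling memcpy.  */
static int ATTRIBUTE_UNUSED
example_move_by_pieces_p (void)
{
  /* SIZE is in bytes, ALIGN in bits.  */
  return MOVE_BY_PIECES_P ((unsigned HOST_WIDE_INT) 16, 64);
}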
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
  gcc_assert (to_real == from_real);

  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }
  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert (GET_MODE_PRECISION (from_mode)
                  != GET_MODE_PRECISION (to_mode));

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else
        tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */
      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (trunc_optab->handlers[to_mode][full_mode].insn_code
                  != CODE_FOR_nothing);

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (sext_optab->handlers[full_mode][from_mode].insn_code
                  != CODE_FOR_nothing);

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          gcc_assert (subword);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_cst (NULL_TREE,
                                        GET_MODE_BITSIZE (to_mode)
                                        - GET_MODE_BITSIZE (from_mode));
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
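
/* Illustrative sketch (not part of the original file): a typical use of
   convert_move, widening a 32-bit pseudo into a 64-bit pseudo.  The
   function name and register choices are hypothetical.  */
static void ATTRIBUTE_UNUSED
example_convert_move (void)
{
  rtx src = gen_reg_rtx (SImode);	/* 32-bit pseudo.  */
  rtx dst = gen_reg_rtx (DImode);	/* 64-bit pseudo.  */

  /* UNSIGNEDP == 1 requests zero-extension; 0 would sign-extend.  */
  convert_move (dst, src, 1);
}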
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.   This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
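
/* Illustrative sketch (not part of the original file): convert_modes on
   a constant needs no instructions at all; narrowing the CONST_INT
   0x1234 from SImode to QImode simply yields a new constant, 0x34.  */
static rtx ATTRIBUTE_UNUSED
example_convert_modes (void)
{
  return convert_modes (QImode, SImode, GEN_INT (0x1234), 1);
}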
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
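
/* Illustrative sketch (not part of the original file): using ENDP to get
   mempcpy-style behavior out of move_by_pieces.  DST and SRC are assumed
   to be BLKmode MEMs, and the (8, 32) size/alignment pair is an arbitrary
   choice for which MOVE_BY_PIECES_P is assumed to hold.  */
static rtx ATTRIBUTE_UNUSED
example_move_by_pieces (rtx dst, rtx src)
{
  /* ENDP == 1 returns a QImode MEM just past the copied block, like
     mempcpy; ENDP == 0 would return DST itself.  */
  return move_by_pieces (dst, src, 8, 32, 1);
}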
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
                       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
           tmode != VOIDmode;
           xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
            || SLOW_UNALIGNED_ACCESS (tmode, align))
          break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          gcc_unreachable ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
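
/* Illustrative sketch (not part of the original file): copying one
   BLKmode MEM to another with emit_block_move.  The 32-byte size is
   arbitrary.  */
static void ATTRIBUTE_UNUSED
example_emit_block_move (rtx dst, rtx src)
{
  /* BLOCK_OP_NORMAL permits a memcpy libcall if neither move_by_pieces
     nor a movmem pattern applies.  */
  emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);
}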
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                      call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
                                       const_ptr_type_node, sizetype,
                                       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
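
/* Illustrative sketch (not part of the original file): a port or front
   end can redirect the block-move libcall to a differently named entry
   point; "__memcpy" is a made-up assembler name.  */
static void ATTRIBUTE_UNUSED
example_init_block_move_fn (void)
{
  init_block_move_fn ("__memcpy");
}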
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
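
/* Illustrative sketch (not part of the original file): spreading a
   DImode value across two consecutive word registers starting at hard
   register 0; the register number and count are target-specific
   assumptions.  */
static void ATTRIBUTE_UNUSED
example_move_block_to_reg (rtx x)
{
  move_block_to_reg (0, x, 2, DImode);
}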
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
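
/* Illustrative sketch (not part of the original file): building a
   register group by hand.  This describes a 16-byte BLKmode value whose
   first 8 bytes live in one DImode pseudo (byte offset 0) and whose
   second 8 bytes live in another (byte offset 8), mirroring the shape
   gen_group_rtx produces.  */
static rtx ATTRIBUTE_UNUSED
example_group_rtx (void)
{
  rtx r0 = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (DImode), GEN_INT (0));
  rtx r1 = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (DImode), GEN_INT (8));
  return gen_rtx_PARALLEL (BLKmode, gen_rtvec (2, r0, r1));
}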
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
        src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
        src = gen_reg_rtx (imode);
      if (imode != BLKmode)
        src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
        src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          gcc_assert (bytelen > 0);
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode);
            }
          else
            {
              rtx mem;

              gcc_assert (!bytepos);
              mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
                                           0, 1, NULL_RTX, mode, mode);
            }
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode);

      if (shift)
        tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
                                build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
        continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        {
          d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
          e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
        }
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
              && GET_CODE (dst) == PARALLEL
              && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
        e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;
  enum machine_mode m = GET_MODE (orig_dst);

  gcc_assert (GET_CODE (src) == PARALLEL);

  if (!SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
      if (imode == BLKmode)
        dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
      else
        dst = gen_reg_rtx (imode);
      emit_group_store (dst, src, type, ssize);
      if (imode != BLKmode)
        dst = gen_lowpart (GET_MODE (orig_dst), dst);
      emit_move_insn (orig_dst, dst);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
                                      build_int_cst (NULL_TREE, shift),
                                      tmps[i], 0);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else
            {
              gcc_assert (bytepos == 0 && XVECLEN (src, 0));
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
        }

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */

rtx
copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
                                                  (TYPE_QUALS (type)
                                                   | TYPE_QUAL_CONST)),
                            0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
     into a new pseudo which is a full word.  */

  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));

  /* If the structure doesn't take up a whole number of words, see whether
     SRCREG is padded on the left or on the right.  If it's on the left,
     set PADDING_CORRECTION to the number of bits to skip.

     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
     targets and left padding on big-endian targets.  The opposite
     holds if the structure is returned at the most significant
     end of the register.  */
  if (bytes % UNITS_PER_WORD != 0
      && (targetm.calls.return_in_msb (type)
          ? !BYTES_BIG_ENDIAN
          : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = padding_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == padding_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == padding_correction)
        src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
                                     GET_MODE (srcreg));

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode, word_mode));
    }

  return tgtblk;
}
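
/* Illustrative sketch (not part of the original file): unpacking a small
   BLKmode return value from the return register into a fresh stack
   temporary; RETREG and TYPE are assumed to describe a small struct
   returned in a register.  */
static rtx ATTRIBUTE_UNUSED
example_copy_blkmode_from_reg (rtx retreg, tree type)
{
  /* Passing a null TGTBLK makes the routine allocate the temporary.  */
  return copy_blkmode_from_reg (NULL_RTX, retreg, type);
}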
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (rtx *call_fusage, rtx reg)
{
  gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (rtx *call_fusage, int regno, int nregs)
{
  int i;

  gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, regno_reg_rtx[regno + i]);
}
/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (rtx *call_fusage, rtx regs)
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && REG_P (reg))
        use_reg (call_fusage, reg);
    }
}
2081 /* Determine whether the LEN bytes generated by CONSTFUN can be
2082 stored to memory using several move instructions. CONSTFUNDATA is
2083 a pointer which will be passed as argument in every CONSTFUN call.
2084 ALIGN is maximum alignment we can assume. Return nonzero if a
2085 call to store_by_pieces should succeed. */
2088 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2089 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2090 void *constfundata, unsigned int align)
2092 unsigned HOST_WIDE_INT l;
2093 unsigned int max_size;
2094 HOST_WIDE_INT offset = 0;
2095 enum machine_mode mode, tmode;
2096 enum insn_code icode;
2103 if (! STORE_BY_PIECES_P (len, align))
2106 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2107 if (align >= GET_MODE_ALIGNMENT (tmode))
2108 align = GET_MODE_ALIGNMENT (tmode);
2111 enum machine_mode xmode;
2113 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2114 tmode != VOIDmode;
2115 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2116 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2117 || SLOW_UNALIGNED_ACCESS (tmode, align))
2120 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2123 /* We would first store what we can in the largest integer mode, then go to
2124 successively smaller modes. */
2127 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2132 max_size = STORE_MAX_PIECES + 1;
2133 while (max_size > 1)
2135 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2136 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2137 if (GET_MODE_SIZE (tmode) < max_size)
2140 if (mode == VOIDmode)
2143 icode = mov_optab->handlers[(int) mode].insn_code;
2144 if (icode != CODE_FOR_nothing
2145 && align >= GET_MODE_ALIGNMENT (mode))
2147 unsigned int size = GET_MODE_SIZE (mode);
2154 cst = (*constfun) (constfundata, offset, mode);
2155 if (!LEGITIMATE_CONSTANT_P (cst))
2165 max_size = GET_MODE_SIZE (mode);
2168 /* The code above should have handled everything. */
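/* Illustrative sketch (not part of this file): a caller expanding a
   string copy of known contents could supply a CONSTFUN in the spirit
   of the callbacks in builtins.c, e.g.

     static rtx
     read_str_piece (void *data, HOST_WIDE_INT offset,
                     enum machine_mode mode)
     {
       const char *str = (const char *) data;
       /* Read GET_MODE_SIZE (mode) bytes of the string at OFFSET.  */
       return c_readstr (str + offset, mode);
     }

   and guard the expansion with

     if (can_store_by_pieces (len, read_str_piece, (void *) str, align))
       store_by_pieces (dest, len, read_str_piece, (void *) str,
                        align, 0);

   where read_str_piece is a hypothetical name used only for this
   example.  */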
2175 /* Generate several move instructions to store LEN bytes generated by
2176 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2177 pointer which will be passed as argument in every CONSTFUN call.
2178 ALIGN is maximum alignment we can assume.
2179 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2180 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
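/* In C library terms (illustration only): ENDP == 0 behaves like
   memcpy and returns TO; ENDP == 1 behaves like mempcpy and returns
   TO + LEN; ENDP == 2 behaves like stpcpy and returns TO + LEN - 1,
   the address of the last byte stored.  */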
2184 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2185 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2186 void *constfundata, unsigned int align, int endp)
2188 struct store_by_pieces data;
2192 gcc_assert (endp != 2);
2196 gcc_assert (STORE_BY_PIECES_P (len, align));
2197 data.constfun = constfun;
2198 data.constfundata = constfundata;
2201 store_by_pieces_1 (&data, align);
2206 gcc_assert (!data.reverse);
2211 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2212 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2214 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2217 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2224 to1 = adjust_address (data.to, QImode, data.offset);
2232 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2233 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2236 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2238 struct store_by_pieces data;
2243 data.constfun = clear_by_pieces_1;
2244 data.constfundata = NULL;
2247 store_by_pieces_1 (&data, align);
2250 /* Callback routine for clear_by_pieces.
2251 Return const0_rtx unconditionally. */
2254 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2255 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2256 enum machine_mode mode ATTRIBUTE_UNUSED)
2261 /* Subroutine of clear_by_pieces and store_by_pieces.
2262 Generate several move instructions to store LEN bytes of block TO. (A MEM
2263 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2266 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2267 unsigned int align ATTRIBUTE_UNUSED)
2269 rtx to_addr = XEXP (data->to, 0);
2270 unsigned int max_size = STORE_MAX_PIECES + 1;
2271 enum machine_mode mode = VOIDmode, tmode;
2272 enum insn_code icode;
2275 data->to_addr = to_addr;
2277 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2278 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2280 data->explicit_inc_to = 0;
2282 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2284 data->offset = data->len;
2286 /* If storing requires more than two move insns,
2287 copy addresses to registers (to make displacements shorter)
2288 and use post-increment if available. */
2289 if (!data->autinc_to
2290 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2292 /* Determine the main mode we'll be using. */
2293 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2294 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2295 if (GET_MODE_SIZE (tmode) < max_size)
2298 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2300 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2301 data->autinc_to = 1;
2302 data->explicit_inc_to = -1;
2305 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2306 && ! data->autinc_to)
2308 data->to_addr = copy_addr_to_reg (to_addr);
2309 data->autinc_to = 1;
2310 data->explicit_inc_to = 1;
2313 if ( !data->autinc_to && CONSTANT_P (to_addr))
2314 data->to_addr = copy_addr_to_reg (to_addr);
2317 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2318 if (align >= GET_MODE_ALIGNMENT (tmode))
2319 align = GET_MODE_ALIGNMENT (tmode);
2322 enum machine_mode xmode;
2324 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2325 tmode != VOIDmode;
2326 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2327 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2328 || SLOW_UNALIGNED_ACCESS (tmode, align))
2331 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2334 /* First store what we can in the largest integer mode, then go to
2335 successively smaller modes. */
2337 while (max_size > 1)
2339 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2340 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2341 if (GET_MODE_SIZE (tmode) < max_size)
2344 if (mode == VOIDmode)
2347 icode = mov_optab->handlers[(int) mode].insn_code;
2348 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2349 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2351 max_size = GET_MODE_SIZE (mode);
2354 /* The code above should have handled everything. */
2355 gcc_assert (!data->len);
2358 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2359 with move instructions for mode MODE. GENFUN is the gen_... function
2360 to make a move insn for that mode. DATA has all the other info. */
2363 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2364 struct store_by_pieces *data)
2366 unsigned int size = GET_MODE_SIZE (mode);
2369 while (data->len >= size)
2372 data->offset -= size;
2374 if (data->autinc_to)
2375 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2378 to1 = adjust_address (data->to, mode, data->offset);
2380 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2381 emit_insn (gen_add2_insn (data->to_addr,
2382 GEN_INT (-(HOST_WIDE_INT) size)));
2384 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2385 emit_insn ((*genfun) (to1, cst));
2387 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2388 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2390 if (! data->reverse)
2391 data->offset += size;
2397 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2398 its length in bytes. */
2401 clear_storage (rtx object, rtx size)
2404 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2405 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2407 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2408 just move a zero. Otherwise, do this a piece at a time. */
2409 if (GET_MODE (object) != BLKmode
2410 && GET_CODE (size) == CONST_INT
2411 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2412 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2415 if (size == const0_rtx)
2417 else if (GET_CODE (size) == CONST_INT
2418 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2419 clear_by_pieces (object, INTVAL (size), align);
2420 else if (clear_storage_via_clrmem (object, size, align))
2423 retval = clear_storage_via_libcall (object, size);
2429 /* A subroutine of clear_storage. Expand a clrmem pattern;
2430 return true if successful. */
2433 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2435 /* Try the most limited insn first, because there's no point
2436 including more than one in the machine description unless
2437 the more limited one has some advantage. */
2439 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2440 enum machine_mode mode;
2442 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2443 mode = GET_MODE_WIDER_MODE (mode))
2445 enum insn_code code = clrmem_optab[(int) mode];
2446 insn_operand_predicate_fn pred;
2448 if (code != CODE_FOR_nothing
2449 /* We don't need MODE to be narrower than
2450 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2451 the mode mask, as it is returned by the macro, it will
2452 definitely be less than the actual mode mask. */
2453 && ((GET_CODE (size) == CONST_INT
2454 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2455 <= (GET_MODE_MASK (mode) >> 1)))
2456 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2457 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2458 || (*pred) (object, BLKmode))
2459 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2460 || (*pred) (opalign, VOIDmode)))
2463 rtx last = get_last_insn ();
2466 op1 = convert_to_mode (mode, size, 1);
2467 pred = insn_data[(int) code].operand[1].predicate;
2468 if (pred != 0 && ! (*pred) (op1, mode))
2469 op1 = copy_to_mode_reg (mode, op1);
2471 pat = GEN_FCN ((int) code) (object, op1, opalign);
2478 delete_insns_since (last);
2485 /* A subroutine of clear_storage. Expand a call to memset.
2486 Return the return value of memset, 0 otherwise. */
2489 clear_storage_via_libcall (rtx object, rtx size)
2491 tree call_expr, arg_list, fn, object_tree, size_tree;
2492 enum machine_mode size_mode;
2495 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2496 place those pseudos into a VAR_DECL and use them later. */
2498 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2500 size_mode = TYPE_MODE (sizetype);
2501 size = convert_to_mode (size_mode, size, 1);
2502 size = copy_to_mode_reg (size_mode, size);
2504 /* It is incorrect to use the libcall calling conventions to call
2505 memset in this context. This could be a user call to memset and
2506 the user may wish to examine the return value from memset. For
2507 targets where libcalls and normal calls have different conventions
2508 for returning pointers, we could end up generating incorrect code. */
2510 object_tree = make_tree (ptr_type_node, object);
2511 size_tree = make_tree (sizetype, size);
2513 fn = clear_storage_libcall_fn (true);
2514 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2515 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2516 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2518 /* Now we have to build up the CALL_EXPR itself. */
2519 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2520 call_expr = build3 (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2521 call_expr, arg_list, NULL_TREE);
2523 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2528 /* A subroutine of clear_storage_via_libcall. Create the tree node
2529 for the function we use for block clears. The first time FOR_CALL
2530 is true, we call assemble_external. */
2532 static GTY(()) tree block_clear_fn;
2535 init_block_clear_fn (const char *asmspec)
2537 if (!block_clear_fn)
2541 fn = get_identifier ("memset");
2542 args = build_function_type_list (ptr_type_node, ptr_type_node,
2543 integer_type_node, sizetype,
2546 fn = build_decl (FUNCTION_DECL, fn, args);
2547 DECL_EXTERNAL (fn) = 1;
2548 TREE_PUBLIC (fn) = 1;
2549 DECL_ARTIFICIAL (fn) = 1;
2550 TREE_NOTHROW (fn) = 1;
2552 block_clear_fn = fn;
2556 set_user_assembler_name (block_clear_fn, asmspec);
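/* The decl built above is the tree equivalent of declaring

     extern void *memset (void *, int, size_t);

   so clear_storage_via_libcall ends up calling memset with the normal
   C calling conventions rather than the libcall ones.  */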
2560 clear_storage_libcall_fn (int for_call)
2562 static bool emitted_extern;
2564 if (!block_clear_fn)
2565 init_block_clear_fn (NULL);
2567 if (for_call && !emitted_extern)
2569 emitted_extern = true;
2570 make_decl_rtl (block_clear_fn);
2571 assemble_external (block_clear_fn);
2574 return block_clear_fn;
2577 /* Generate code to copy Y into X.
2578 Both Y and X must have the same mode, except that
2579 Y can be a constant with VOIDmode.
2580 This mode cannot be BLKmode; use emit_block_move for that.
2582 Return the last instruction emitted. */
2585 emit_move_insn (rtx x, rtx y)
2587 enum machine_mode mode = GET_MODE (x);
2588 rtx y_cst = NULL_RTX;
2591 gcc_assert (mode != BLKmode
2592 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
2597 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2598 && (last_insn = compress_float_constant (x, y)))
2603 if (!LEGITIMATE_CONSTANT_P (y))
2605 y = force_const_mem (mode, y);
2607 /* If the target's cannot_force_const_mem prevented the spill,
2608 assume that the target's move expanders will also take care
2609 of the non-legitimate constant. */
2615 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2618 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2619 && ! push_operand (x, GET_MODE (x)))
2621 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2622 x = validize_mem (x);
2625 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2627 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2628 y = validize_mem (y);
2630 gcc_assert (mode != BLKmode);
2632 last_insn = emit_move_insn_1 (x, y);
2634 if (y_cst && REG_P (x)
2635 && (set = single_set (last_insn)) != NULL_RTX
2636 && SET_DEST (set) == x
2637 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2638 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2643 /* Low level part of emit_move_insn.
2644 Called just like emit_move_insn, but assumes X and Y
2645 are basically valid. */
2648 emit_move_insn_1 (rtx x, rtx y)
2650 enum machine_mode mode = GET_MODE (x);
2651 enum machine_mode submode;
2653 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
2655 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2657 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2659 /* Expand complex moves by moving real part and imag part, if possible. */
2660 else if (COMPLEX_MODE_P (mode)
2661 && BLKmode != (submode = GET_MODE_INNER (mode))
2662 && (mov_optab->handlers[(int) submode].insn_code
2663 != CODE_FOR_nothing))
2665 unsigned int modesize = GET_MODE_SIZE (mode);
2666 unsigned int submodesize = GET_MODE_SIZE (submode);
2668 /* Don't split destination if it is a stack push. */
2669 int stack = push_operand (x, mode);
2671 #ifdef PUSH_ROUNDING
2672 /* In case we output to the stack, but the size is smaller than the
2673 machine can push exactly, we need to use move instructions. */
2674 if (stack && PUSH_ROUNDING (submodesize) != submodesize)
2677 HOST_WIDE_INT offset1, offset2;
2679 /* Do not use anti_adjust_stack, since we don't want to update
2680 stack_pointer_delta. */
2681 temp = expand_binop (Pmode,
2682 #ifdef STACK_GROWS_DOWNWARD
2688 GEN_INT (PUSH_ROUNDING (modesize)),
2689 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2691 if (temp != stack_pointer_rtx)
2692 emit_move_insn (stack_pointer_rtx, temp);
2694 #ifdef STACK_GROWS_DOWNWARD
2696 offset2 = submodesize;
2698 offset1 = -PUSH_ROUNDING (modesize);
2699 offset2 = -PUSH_ROUNDING (modesize) + submodesize;
2702 emit_move_insn (change_address (x, submode,
2703 gen_rtx_PLUS (Pmode,
2705 GEN_INT (offset1))),
2706 gen_realpart (submode, y));
2707 emit_move_insn (change_address (x, submode,
2708 gen_rtx_PLUS (Pmode,
2710 GEN_INT (offset2))),
2711 gen_imagpart (submode, y));
2715 /* If this is a stack, push the highpart first, so it
2716 will be in the argument order.
2718 In that case, change_address is used only to convert
2719 the mode, not to change the address. */
2722 /* Note that the real part always precedes the imag part in memory
2723 regardless of machine's endianness. */
2724 #ifdef STACK_GROWS_DOWNWARD
2725 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2726 gen_imagpart (submode, y));
2727 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2728 gen_realpart (submode, y));
2730 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2731 gen_realpart (submode, y));
2732 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2733 gen_imagpart (submode, y));
2738 rtx realpart_x, realpart_y;
2739 rtx imagpart_x, imagpart_y;
2741 /* If this is a complex value with each part being smaller than a
2742 word, the usual calling sequence will likely pack the pieces into
2743 a single register. Unfortunately, SUBREG of hard registers only
2744 deals in terms of words, so we have a problem converting input
2745 arguments to the CONCAT of two registers that is used elsewhere
2746 for complex values. If this is before reload, we can copy it into
2747 memory and reload. FIXME, we should see about using extract and
2748 insert on integer registers, but complex short and complex char
2749 variables should be rarely used. */
2750 if ((reload_in_progress | reload_completed) == 0
2751 && (!validate_subreg (submode, mode, NULL, submodesize)
2752 || !validate_subreg (submode, mode, NULL, 0)))
2754 if (REG_P (x) || REG_P (y))
2757 enum machine_mode reg_mode
2758 = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 1);
2760 gcc_assert (reg_mode != BLKmode);
2762 mem = assign_stack_temp (reg_mode, modesize, 0);
2763 cmem = adjust_address (mem, mode, 0);
2767 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2768 emit_move_insn_1 (cmem, y);
2769 return emit_move_insn_1 (sreg, mem);
2773 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2774 emit_move_insn_1 (mem, sreg);
2775 return emit_move_insn_1 (x, cmem);
2780 realpart_x = gen_realpart (submode, x);
2781 realpart_y = gen_realpart (submode, y);
2782 imagpart_x = gen_imagpart (submode, x);
2783 imagpart_y = gen_imagpart (submode, y);
2785 /* Show the output dies here. This is necessary for SUBREGs
2786 of pseudos since we cannot track their lifetimes correctly;
2787 hard regs shouldn't appear here except as return values.
2788 We never want to emit such a clobber after reload. */
2790 && ! (reload_in_progress || reload_completed)
2791 && (GET_CODE (realpart_x) == SUBREG
2792 || GET_CODE (imagpart_x) == SUBREG))
2793 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2795 emit_move_insn (realpart_x, realpart_y);
2796 emit_move_insn (imagpart_x, imagpart_y);
2799 return get_last_insn ();
2802 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2803 find a mode to do it in. If we have a movcc, use it. Otherwise,
2804 find the MODE_INT mode of the same width. */
2805 else if (GET_MODE_CLASS (mode) == MODE_CC
2806 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2808 enum insn_code insn_code;
2809 enum machine_mode tmode = VOIDmode;
2813 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2816 for (tmode = QImode; tmode != VOIDmode;
2817 tmode = GET_MODE_WIDER_MODE (tmode))
2818 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2821 gcc_assert (tmode != VOIDmode);
2823 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2824 may call change_address which is not appropriate if we were
2825 called when a reload was in progress. We don't have to worry
2826 about changing the address since the size in bytes is supposed to
2827 be the same. Copy the MEM to change the mode and move any
2828 substitutions from the old MEM to the new one. */
2830 if (reload_in_progress)
2832 x = gen_lowpart_common (tmode, x1);
2833 if (x == 0 && MEM_P (x1))
2835 x = adjust_address_nv (x1, tmode, 0);
2836 copy_replacements (x1, x);
2839 y = gen_lowpart_common (tmode, y1);
2840 if (y == 0 && MEM_P (y1))
2842 y = adjust_address_nv (y1, tmode, 0);
2843 copy_replacements (y1, y);
2848 x = gen_lowpart (tmode, x);
2849 y = gen_lowpart (tmode, y);
2852 insn_code = mov_optab->handlers[(int) tmode].insn_code;
2853 return emit_insn (GEN_FCN (insn_code) (x, y));
2856 /* Try using a move pattern for the corresponding integer mode. This is
2857 only safe when simplify_subreg can convert MODE constants into integer
2858 constants. At present, it can only do this reliably if the value
2859 fits within a HOST_WIDE_INT. */
2860 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2861 && (submode = int_mode_for_mode (mode)) != BLKmode
2862 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
2863 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
2864 (simplify_gen_subreg (submode, x, mode, 0),
2865 simplify_gen_subreg (submode, y, mode, 0)));
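/* For example (illustration): on a target without a movsf pattern, an
   SFmode constant move can be emitted as a movsi of the constant's
   32-bit representation, because simplify_gen_subreg can fold
   (subreg:SI (const_double:SF ...) 0) into a CONST_INT.  */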
2867 /* This will handle any multi-word or full-word mode that lacks a move_insn
2868 pattern. However, you will get better code if you define such patterns,
2869 even if they must turn into multiple assembler instructions. */
2877 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
2879 #ifdef PUSH_ROUNDING
2881 /* If X is a push on the stack, do the push now and replace
2882 X with a reference to the stack pointer. */
2883 if (push_operand (x, GET_MODE (x)))
2888 /* Do not use anti_adjust_stack, since we don't want to update
2889 stack_pointer_delta. */
2890 temp = expand_binop (Pmode,
2891 #ifdef STACK_GROWS_DOWNWARD
2899 (GET_MODE_SIZE (GET_MODE (x)))),
2900 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2902 if (temp != stack_pointer_rtx)
2903 emit_move_insn (stack_pointer_rtx, temp);
2905 code = GET_CODE (XEXP (x, 0));
2907 /* Just hope that small offsets off SP are OK. */
2908 if (code == POST_INC)
2909 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2910 GEN_INT (-((HOST_WIDE_INT)
2911 GET_MODE_SIZE (GET_MODE (x)))));
2912 else if (code == POST_DEC)
2913 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2914 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2916 temp = stack_pointer_rtx;
2918 x = change_address (x, VOIDmode, temp);
2922 /* If we are in reload, see if either operand is a MEM whose address
2923 is scheduled for replacement. */
2924 if (reload_in_progress && MEM_P (x)
2925 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2926 x = replace_equiv_address_nv (x, inner);
2927 if (reload_in_progress && MEM_P (y)
2928 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2929 y = replace_equiv_address_nv (y, inner);
2935 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2938 rtx xpart = operand_subword (x, i, 1, mode);
2939 rtx ypart = operand_subword (y, i, 1, mode);
2941 /* If we can't get a part of Y, put Y into memory if it is a
2942 constant. Otherwise, force it into a register. If we still
2943 can't get a part of Y, abort. */
2944 if (ypart == 0 && CONSTANT_P (y))
2946 y = force_const_mem (mode, y);
2947 ypart = operand_subword (y, i, 1, mode);
2949 else if (ypart == 0)
2950 ypart = operand_subword_force (y, i, mode);
2952 gcc_assert (xpart && ypart);
2954 need_clobber |= (GET_CODE (xpart) == SUBREG);
2956 last_insn = emit_move_insn (xpart, ypart);
2962 /* Show the output dies here. This is necessary for SUBREGs
2963 of pseudos since we cannot track their lifetimes correctly;
2964 hard regs shouldn't appear here except as return values.
2965 We never want to emit such a clobber after reload. */
2967 && ! (reload_in_progress || reload_completed)
2968 && need_clobber != 0)
2969 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2977 /* If Y is representable exactly in a narrower mode, and the target can
2978 perform the extension directly from constant or memory, then emit the
2979 move as an extension. */
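/* For instance (illustration): a DFmode load of 2.0 can be emitted as
   an extendsfdf2 from an SFmode constant, since 2.0 is exactly
   representable in single precision; this halves the constant-pool
   entry on targets that must spill the constant to memory.  */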
2982 compress_float_constant (rtx x, rtx y)
2984 enum machine_mode dstmode = GET_MODE (x);
2985 enum machine_mode orig_srcmode = GET_MODE (y);
2986 enum machine_mode srcmode;
2989 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
2991 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
2992 srcmode != orig_srcmode;
2993 srcmode = GET_MODE_WIDER_MODE (srcmode))
2996 rtx trunc_y, last_insn;
2998 /* Skip if the target can't extend this way. */
2999 ic = can_extend_p (dstmode, srcmode, 0);
3000 if (ic == CODE_FOR_nothing)
3003 /* Skip if the narrowed value isn't exact. */
3004 if (! exact_real_truncate (srcmode, &r))
3007 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3009 if (LEGITIMATE_CONSTANT_P (trunc_y))
3011 /* Skip if the target needs extra instructions to perform the extension. */
3013 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3016 else if (float_extend_from_mem[dstmode][srcmode])
3017 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3021 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3022 last_insn = get_last_insn ();
3025 set_unique_reg_note (last_insn, REG_EQUAL, y);
3033 /* Pushing data onto the stack. */
3035 /* Push a block of length SIZE (perhaps variable)
3036 and return an rtx to address the beginning of the block.
3037 The value may be virtual_outgoing_args_rtx.
3039 EXTRA is the number of bytes of padding to push in addition to SIZE.
3040 BELOW nonzero means this padding comes at low addresses;
3041 otherwise, the padding comes at high addresses. */
3044 push_block (rtx size, int extra, int below)
3048 size = convert_modes (Pmode, ptr_mode, size, 1);
3049 if (CONSTANT_P (size))
3050 anti_adjust_stack (plus_constant (size, extra));
3051 else if (REG_P (size) && extra == 0)
3052 anti_adjust_stack (size);
3055 temp = copy_to_mode_reg (Pmode, size);
3057 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3058 temp, 0, OPTAB_LIB_WIDEN);
3059 anti_adjust_stack (temp);
3062 #ifndef STACK_GROWS_DOWNWARD
3068 temp = virtual_outgoing_args_rtx;
3069 if (extra != 0 && below)
3070 temp = plus_constant (temp, extra);
3074 if (GET_CODE (size) == CONST_INT)
3075 temp = plus_constant (virtual_outgoing_args_rtx,
3076 -INTVAL (size) - (below ? 0 : extra));
3077 else if (extra != 0 && !below)
3078 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3079 negate_rtx (Pmode, plus_constant (size, extra)));
3081 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3082 negate_rtx (Pmode, size));
3085 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3088 #ifdef PUSH_ROUNDING
3090 /* Emit single push insn. */
3093 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3096 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3098 enum insn_code icode;
3099 insn_operand_predicate_fn pred;
3101 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3102 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3103 a MEM representing the push operation to the move expander. */
3104 icode = push_optab->handlers[(int) mode].insn_code;
3105 if (icode != CODE_FOR_nothing)
3107 if (((pred = insn_data[(int) icode].operand[0].predicate)
3108 && !((*pred) (x, mode))))
3109 x = force_reg (mode, x);
3110 emit_insn (GEN_FCN (icode) (x));
3113 if (GET_MODE_SIZE (mode) == rounded_size)
3114 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
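/* On a typical STACK_GROWS_DOWNWARD target this produces e.g.
   (pre_dec:SI (reg sp)); the MEM wrapped around it below then
   expresses the push directly.  */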
3115 /* If we are to pad downward, adjust the stack pointer first and
3116 then store X into the stack location using an offset. This is
3117 because emit_move_insn does not know how to pad; it does not have access to type. */
3119 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3121 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3122 HOST_WIDE_INT offset;
3124 emit_move_insn (stack_pointer_rtx,
3125 expand_binop (Pmode,
3126 #ifdef STACK_GROWS_DOWNWARD
3132 GEN_INT (rounded_size),
3133 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3135 offset = (HOST_WIDE_INT) padding_size;
3136 #ifdef STACK_GROWS_DOWNWARD
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 /* We have already decremented the stack pointer, so get the previous value. */
3140 offset += (HOST_WIDE_INT) rounded_size;
3142 if (STACK_PUSH_CODE == POST_INC)
3143 /* We have already incremented the stack pointer, so get the previous value. */
3145 offset -= (HOST_WIDE_INT) rounded_size;
3147 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3151 #ifdef STACK_GROWS_DOWNWARD
3152 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3153 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3154 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3156 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3157 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3158 GEN_INT (rounded_size));
3160 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3163 dest = gen_rtx_MEM (mode, dest_addr);
3167 set_mem_attributes (dest, type, 1);
3169 if (flag_optimize_sibling_calls)
3170 /* Function incoming arguments may overlap with sibling call
3171 outgoing arguments and we cannot allow reordering of reads
3172 from function arguments with stores to outgoing arguments
3173 of sibling calls. */
3174 set_mem_alias_set (dest, 0);
3176 emit_move_insn (dest, x);
3180 /* Generate code to push X onto the stack, assuming it has mode MODE and
3182 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3184 SIZE is an rtx for the size of data to be copied (in bytes),
3185 needed only if X is BLKmode.
3187 ALIGN (in bits) is maximum alignment we can assume.
3189 If PARTIAL and REG are both nonzero, then copy that many of the first
3190 words of X into registers starting with REG, and push the rest of X.
3191 The amount of space pushed is decreased by PARTIAL words,
3192 rounded *down* to a multiple of PARM_BOUNDARY.
3193 REG must be a hard register in this case.
3194 If REG is zero but PARTIAL is not, take all other actions for an
3195 argument partially in registers, but do not actually load any registers.
3198 EXTRA is the amount in bytes of extra space to leave next to this arg.
3199 This is ignored if an argument block has already been allocated.
3201 On a machine that lacks real push insns, ARGS_ADDR is the address of
3202 the bottom of the argument block for this call. We use indexing off there
3203 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3204 argument block has not been preallocated.
3206 ARGS_SO_FAR is the size of args previously pushed for this call.
3208 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3209 for arguments passed in registers. If nonzero, it will be the number
3210 of bytes required. */
3213 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3214 unsigned int align, int partial, rtx reg, int extra,
3215 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3219 enum direction stack_direction
3220 #ifdef STACK_GROWS_DOWNWARD
3226 /* Decide where to pad the argument: `downward' for below,
3227 `upward' for above, or `none' for don't pad it.
3228 Default is below for small data on big-endian machines; else above. */
3229 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3231 /* Invert direction if stack is post-decrement.
3233 if (STACK_PUSH_CODE == POST_DEC)
3234 if (where_pad != none)
3235 where_pad = (where_pad == downward ? upward : downward);
3239 if (mode == BLKmode)
3241 /* Copy a block into the stack, entirely or partially. */
3244 int used = partial * UNITS_PER_WORD;
3248 if (reg && GET_CODE (reg) == PARALLEL)
3250 /* Use the size of the elt to compute offset. */
3251 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3252 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3253 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3256 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3262 /* USED is now the # of bytes we need not copy to the stack
3263 because registers will take care of them. */
3266 xinner = adjust_address (xinner, BLKmode, used);
3268 /* If the partial register-part of the arg counts in its stack size,
3269 skip the part of stack space corresponding to the registers.
3270 Otherwise, start copying to the beginning of the stack space,
3271 by setting SKIP to 0. */
3272 skip = (reg_parm_stack_space == 0) ? 0 : used;
3274 #ifdef PUSH_ROUNDING
3275 /* Do it with several push insns if that doesn't take lots of insns
3276 and if there is no difficulty with push insns that skip bytes
3277 on the stack for alignment purposes. */
3280 && GET_CODE (size) == CONST_INT
3282 && MEM_ALIGN (xinner) >= align
3283 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3284 /* Here we avoid the case of a structure whose weak alignment
3285 forces many pushes of a small amount of data,
3286 and such small pushes do rounding that causes trouble. */
3287 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3288 || align >= BIGGEST_ALIGNMENT
3289 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3290 == (align / BITS_PER_UNIT)))
3291 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3293 /* Push padding now if padding above and stack grows down,
3294 or if padding below and stack grows up.
3295 But if space already allocated, this has already been done. */
3296 if (extra && args_addr == 0
3297 && where_pad != none && where_pad != stack_direction)
3298 anti_adjust_stack (GEN_INT (extra));
3300 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3303 #endif /* PUSH_ROUNDING */
3307 /* Otherwise make space on the stack and copy the data
3308 to the address of that space. */
3310 /* Deduct words put into registers from the size we must copy. */
3313 if (GET_CODE (size) == CONST_INT)
3314 size = GEN_INT (INTVAL (size) - used);
3316 size = expand_binop (GET_MODE (size), sub_optab, size,
3317 GEN_INT (used), NULL_RTX, 0,
3321 /* Get the address of the stack space.
3322 In this case, we do not deal with EXTRA separately.
3323 A single stack adjust will do. */
3326 temp = push_block (size, extra, where_pad == downward);
3329 else if (GET_CODE (args_so_far) == CONST_INT)
3330 temp = memory_address (BLKmode,
3331 plus_constant (args_addr,
3332 skip + INTVAL (args_so_far)));
3334 temp = memory_address (BLKmode,
3335 plus_constant (gen_rtx_PLUS (Pmode,
3340 if (!ACCUMULATE_OUTGOING_ARGS)
3342 /* If the source is referenced relative to the stack pointer,
3343 copy it to another register to stabilize it. We do not need
3344 to do this if we know that we won't be changing sp. */
3346 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3347 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3348 temp = copy_to_reg (temp);
3351 target = gen_rtx_MEM (BLKmode, temp);
3353 /* We do *not* set_mem_attributes here, because incoming arguments
3354 may overlap with sibling call outgoing arguments and we cannot
3355 allow reordering of reads from function arguments with stores
3356 to outgoing arguments of sibling calls. We do, however, want
3357 to record the alignment of the stack slot. */
3358 /* ALIGN may well be better aligned than TYPE, e.g. due to
3359 PARM_BOUNDARY. Assume the caller isn't lying. */
3360 set_mem_align (target, align);
3362 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3365 else if (partial > 0)
3367 /* Scalar partly in registers. */
3369 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3372 /* # words of start of argument
3373 that we must make space for but need not store. */
3374 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3375 int args_offset = INTVAL (args_so_far);
3378 /* Push padding now if padding above and stack grows down,
3379 or if padding below and stack grows up.
3380 But if space already allocated, this has already been done. */
3381 if (extra && args_addr == 0
3382 && where_pad != none && where_pad != stack_direction)
3383 anti_adjust_stack (GEN_INT (extra));
3385 /* If we make space by pushing it, we might as well push
3386 the real data. Otherwise, we can leave OFFSET nonzero
3387 and leave the space uninitialized. */
3391 /* Now NOT_STACK gets the number of words that we don't need to
3392 allocate on the stack. */
3393 not_stack = partial - offset;
3395 /* If the partial register-part of the arg counts in its stack size,
3396 skip the part of stack space corresponding to the registers.
3397 Otherwise, start copying to the beginning of the stack space,
3398 by setting SKIP to 0. */
3399 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3401 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3402 x = validize_mem (force_const_mem (mode, x));
3404 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3405 SUBREGs of such registers are not allowed. */
3406 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3407 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3408 x = copy_to_reg (x);
3410 /* Loop over all the words allocated on the stack for this arg. */
3411 /* We can do it by words, because any scalar bigger than a word
3412 has a size a multiple of a word. */
3413 #ifndef PUSH_ARGS_REVERSED
3414 for (i = not_stack; i < size; i++)
3416 for (i = size - 1; i >= not_stack; i--)
3418 if (i >= not_stack + offset)
3419 emit_push_insn (operand_subword_force (x, i, mode),
3420 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3422 GEN_INT (args_offset + ((i - not_stack + skip)
3424 reg_parm_stack_space, alignment_pad);
3431 /* Push padding now if padding above and stack grows down,
3432 or if padding below and stack grows up.
3433 But if space already allocated, this has already been done. */
3434 if (extra && args_addr == 0
3435 && where_pad != none && where_pad != stack_direction)
3436 anti_adjust_stack (GEN_INT (extra));
3438 #ifdef PUSH_ROUNDING
3439 if (args_addr == 0 && PUSH_ARGS)
3440 emit_single_push_insn (mode, x, type);
3444 if (GET_CODE (args_so_far) == CONST_INT)
3446 = memory_address (mode,
3447 plus_constant (args_addr,
3448 INTVAL (args_so_far)));
3450 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3452 dest = gen_rtx_MEM (mode, addr);
3454 /* We do *not* set_mem_attributes here, because incoming arguments
3455 may overlap with sibling call outgoing arguments and we cannot
3456 allow reordering of reads from function arguments with stores
3457 to outgoing arguments of sibling calls. We do, however, want
3458 to record the alignment of the stack slot. */
3459 /* ALIGN may well be better aligned than TYPE, e.g. due to
3460 PARM_BOUNDARY. Assume the caller isn't lying. */
3461 set_mem_align (dest, align);
3463 emit_move_insn (dest, x);
3467 /* If part should go in registers, copy that part
3468 into the appropriate registers. Do this now, at the end,
3469 since mem-to-mem copies above may do function calls. */
3470 if (partial > 0 && reg != 0)
3472 /* Handle calls that pass values in multiple non-contiguous locations.
3473 The Irix 6 ABI has examples of this. */
3474 if (GET_CODE (reg) == PARALLEL)
3475 emit_group_load (reg, x, type, -1);
3477 move_block_to_reg (REGNO (reg), x, partial, mode);
3480 if (extra && args_addr == 0 && where_pad == stack_direction)
3481 anti_adjust_stack (GEN_INT (extra));
3483 if (alignment_pad && args_addr == 0)
3484 anti_adjust_stack (alignment_pad);
3487 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3491 get_subtarget (rtx x)
3495 /* Only registers can be subtargets. */
3497 /* Don't use hard regs to avoid extending their life. */
3498 || REGNO (x) < FIRST_PSEUDO_REGISTER
3502 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
3503 FIELD is a bitfield. Returns true if the optimization was successful,
3504 and there's nothing else to do. */
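/* Illustration (not from the sources): on a 32-bit little-endian
   target, given

     struct S { unsigned pad : 2, top : 30; } *p;
     p->top += 1;

   the field reaches the most significant end of the word, so the
   increment can be done as a plain word addition of 1 << 2 once the
   value is shifted into place: carries out of the field simply fall
   off the top, and no mask/insert sequence is needed.  Likewise a
   1-bit field can be toggled with a single xor.  */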
3507 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
3508 unsigned HOST_WIDE_INT bitpos,
3509 enum machine_mode mode1, rtx str_rtx,
3512 enum machine_mode str_mode = GET_MODE (str_rtx);
3513 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
3518 if (mode1 != VOIDmode
3519 || bitsize >= BITS_PER_WORD
3520 || str_bitsize > BITS_PER_WORD
3521 || TREE_SIDE_EFFECTS (to)
3522 || TREE_THIS_VOLATILE (to))
3526 if (!BINARY_CLASS_P (src)
3527 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
3530 op0 = TREE_OPERAND (src, 0);
3531 op1 = TREE_OPERAND (src, 1);
3534 if (!operand_equal_p (to, op0, 0))
3537 if (MEM_P (str_rtx))
3539 unsigned HOST_WIDE_INT offset1;
3541 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
3542 str_mode = word_mode;
3543 str_mode = get_best_mode (bitsize, bitpos,
3544 MEM_ALIGN (str_rtx), str_mode, 0);
3545 if (str_mode == VOIDmode)
3547 str_bitsize = GET_MODE_BITSIZE (str_mode);
3550 bitpos %= str_bitsize;
3551 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
3552 str_rtx = adjust_address (str_rtx, str_mode, offset1);
3554 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
3557 /* If the bit field covers the whole REG/MEM, store_field
3558 will likely generate better code. */
3559 if (bitsize >= str_bitsize)
3562 /* We can't handle fields split across multiple entities. */
3563 if (bitpos + bitsize > str_bitsize)
3566 if (BYTES_BIG_ENDIAN)
3567 bitpos = str_bitsize - bitpos - bitsize;
3569 switch (TREE_CODE (src))
3573 /* For now, just optimize the case of the topmost bitfield
3574 where we don't need to do any masking and also
3575 1-bit bitfields where xor can be used.
3576 We might win by one instruction for the other bitfields
3577 too if insv/extv instructions aren't used, so that
3578 can be added later. */
3579 if (bitpos + bitsize != str_bitsize
3580 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3583 value = expand_expr (op1, NULL_RTX, str_mode, 0);
3584 value = convert_modes (str_mode,
3585 TYPE_MODE (TREE_TYPE (op1)), value,
3586 TYPE_UNSIGNED (TREE_TYPE (op1)));
3588 /* We may be accessing data outside the field, which means
3589 we can alias adjacent data. */
3590 if (MEM_P (str_rtx))
3592 str_rtx = shallow_copy_rtx (str_rtx);
3593 set_mem_alias_set (str_rtx, 0);
3594 set_mem_expr (str_rtx, 0);
3597 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3598 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
3600 value = expand_and (str_mode, value, const1_rtx, NULL);
3603 value = expand_shift (LSHIFT_EXPR, str_mode, value,
3604 build_int_cst (NULL_TREE, bitpos),
3606 result = expand_binop (str_mode, binop, str_rtx,
3607 value, str_rtx, 1, OPTAB_WIDEN);
3608 if (result != str_rtx)
3609 emit_move_insn (str_rtx, result);
3620 /* Expand an assignment that stores the value of FROM into TO. */
3623 expand_assignment (tree to, tree from)
3628 /* Don't crash if the lhs of the assignment was erroneous. */
3630 if (TREE_CODE (to) == ERROR_MARK)
3632 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3636 /* Assignment of a structure component needs special treatment
3637 if the structure component's rtx is not simply a MEM.
3638 Assignment of an array element at a constant index, and assignment of
3639 an array element in an unaligned packed structure field, has the same problem. */
3641 if (handled_component_p (to)
3642 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3644 enum machine_mode mode1;
3645 HOST_WIDE_INT bitsize, bitpos;
3653 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3654 &unsignedp, &volatilep);
3656 /* If we are going to use store_bit_field and extract_bit_field,
3657 make sure to_rtx will be safe for multiple use. */
3659 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3663 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3665 gcc_assert (MEM_P (to_rtx));
3667 #ifdef POINTERS_EXTEND_UNSIGNED
3668 if (GET_MODE (offset_rtx) != Pmode)
3669 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3671 if (GET_MODE (offset_rtx) != ptr_mode)
3672 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3675 /* A constant address in TO_RTX can have VOIDmode, we must not try
3676 to call force_reg for that case. Avoid that case. */
3678 && GET_MODE (to_rtx) == BLKmode
3679 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3681 && (bitpos % bitsize) == 0
3682 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3683 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3685 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3689 to_rtx = offset_address (to_rtx, offset_rtx,
3690 highest_pow2_factor_for_target (to,
3694 /* Handle expand_expr of a complex value returning a CONCAT. */
3695 if (GET_CODE (to_rtx) == CONCAT)
3697 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
3698 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false);
3704 /* If the field is at offset zero, we could have been given the
3705 DECL_RTX of the parent struct. Don't munge it. */
3706 to_rtx = shallow_copy_rtx (to_rtx);
3708 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3710 /* Deal with volatile and readonly fields. The former is only
3711 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3713 MEM_VOLATILE_P (to_rtx) = 1;
3715 if (!can_address_p (to))
3716 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3719 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
3723 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3724 TREE_TYPE (tem), get_alias_set (to));
3728 preserve_temp_slots (result);
3734 /* If the rhs is a function call and its value is not an aggregate,
3735 call the function before we start to compute the lhs.
3736 This is needed for correct code for cases such as
3737 val = setjmp (buf) on machines where reference to val
3738 requires loading up part of an address in a separate insn.
3740 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3741 since it might be a promoted variable where the zero- or sign- extension
3742 needs to be done. Handling this in the normal way is safe because no
3743 computation is done before the call. */
3744 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3745 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3746 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3747 && REG_P (DECL_RTL (to))))
3752 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3754 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3756 /* Handle calls that return values in multiple non-contiguous locations.
3757 The Irix 6 ABI has examples of this. */
3758 if (GET_CODE (to_rtx) == PARALLEL)
3759 emit_group_load (to_rtx, value, TREE_TYPE (from),
3760 int_size_in_bytes (TREE_TYPE (from)));
3761 else if (GET_MODE (to_rtx) == BLKmode)
3762 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3765 if (POINTER_TYPE_P (TREE_TYPE (to)))
3766 value = convert_memory_address (GET_MODE (to_rtx), value);
3767 emit_move_insn (to_rtx, value);
3769 preserve_temp_slots (to_rtx);
3775 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3776 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3779 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3781 /* Don't move directly into a return register. */
3782 if (TREE_CODE (to) == RESULT_DECL
3783 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3788 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3790 if (GET_CODE (to_rtx) == PARALLEL)
3791 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3792 int_size_in_bytes (TREE_TYPE (from)));
3794 emit_move_insn (to_rtx, temp);
3796 preserve_temp_slots (to_rtx);
3802 /* In case we are returning the contents of an object which overlaps
3803 the place the value is being stored, use a safe function when copying
3804 a value through a pointer into a structure value return block. */
3805 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3806 && current_function_returns_struct
3807 && !current_function_returns_pcc_struct)
3812 size = expr_size (from);
3813 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3815 emit_library_call (memmove_libfunc, LCT_NORMAL,
3816 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3817 XEXP (from_rtx, 0), Pmode,
3818 convert_to_mode (TYPE_MODE (sizetype),
3819 size, TYPE_UNSIGNED (sizetype)),
3820 TYPE_MODE (sizetype));
3822 preserve_temp_slots (to_rtx);
3828 /* Compute FROM and store the value in the rtx we got. */
3831 result = store_expr (from, to_rtx, 0);
3832 preserve_temp_slots (result);
3838 /* Generate code for computing expression EXP,
3839 and storing the value into TARGET.
3841 If the mode is BLKmode then we may return TARGET itself.
3842 It turns out that in BLKmode it doesn't cause a problem,
3843 because C has no operators that could combine two different
3844 assignments into the same BLKmode object with different values
3845 with no sequence point. Will other languages need this to be more thorough?
3848 If CALL_PARAM_P is nonzero, this is a store into a call param on the
3849 stack, and block moves may need to be treated specially. */
3852 store_expr (tree exp, rtx target, int call_param_p)
3855 rtx alt_rtl = NULL_RTX;
3856 int dont_return_target = 0;
3858 if (VOID_TYPE_P (TREE_TYPE (exp)))
3860 /* C++ can generate ?: expressions with a throw expression in one
3861 branch and an rvalue in the other. Here, we resolve attempts to
3862 store the throw expression's nonexistent result. */
3863 gcc_assert (!call_param_p);
3864 expand_expr (exp, const0_rtx, VOIDmode, 0);
3867 if (TREE_CODE (exp) == COMPOUND_EXPR)
3869 /* Perform the first part of the compound expression, then assign from the second part. */
3871 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
3872 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3873 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3875 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3877 /* For conditional expression, get safe form of the target. Then
3878 test the condition, doing the appropriate assignment on either
3879 side. This avoids the creation of unnecessary temporaries.
3880 For non-BLKmode, it is more efficient not to do this. */
3882 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3884 do_pending_stack_adjust ();
3886 jumpifnot (TREE_OPERAND (exp, 0), lab1);
3887 store_expr (TREE_OPERAND (exp, 1), target, call_param_p);
3888 emit_jump_insn (gen_jump (lab2));
3891 store_expr (TREE_OPERAND (exp, 2), target, call_param_p);
3897 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3898 /* If this is a scalar in a register that is stored in a wider mode
3899 than the declared mode, compute the result into its declared mode
3900 and then convert to the wider mode. Our value is the computed expression.
3903 rtx inner_target = 0;
3905 /* We can do the conversion inside EXP, which will often result
3906 in some optimizations. Do the conversion in two steps: first
3907 change the signedness, if needed, then the extend. But don't
3908 do this if the type of EXP is a subtype of something else
3909 since then the conversion might involve more than just
3910 converting modes. */
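/* Example: EXP has type short but TARGET holds an unsigned short
   promoted to a zero-extended SImode register; EXP is first converted
   to unsigned short (the signedness change only) and then to the
   SImode unsigned type (the extension).  */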
3911 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
3912 && TREE_TYPE (TREE_TYPE (exp)) == 0
3913 && (!lang_hooks.reduce_bit_field_operations
3914 || (GET_MODE_PRECISION (GET_MODE (target))
3915 == TYPE_PRECISION (TREE_TYPE (exp)))))
3917 if (TYPE_UNSIGNED (TREE_TYPE (exp))
3918 != SUBREG_PROMOTED_UNSIGNED_P (target))
3920 (lang_hooks.types.signed_or_unsigned_type
3921 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
3923 exp = convert (lang_hooks.types.type_for_mode
3924 (GET_MODE (SUBREG_REG (target)),
3925 SUBREG_PROMOTED_UNSIGNED_P (target)),
3928 inner_target = SUBREG_REG (target);
3931 temp = expand_expr (exp, inner_target, VOIDmode,
3932 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
3934 /* If TEMP is a VOIDmode constant, use convert_modes to make
3935 sure that we properly convert it. */
3936 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3938 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3939 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3940 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3941 GET_MODE (target), temp,
3942 SUBREG_PROMOTED_UNSIGNED_P (target));
3945 convert_move (SUBREG_REG (target), temp,
3946 SUBREG_PROMOTED_UNSIGNED_P (target));
3952 temp = expand_expr_real (exp, target, GET_MODE (target),
3954 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
3956 /* Return TARGET if it's a specified hardware register.
3957 If TARGET is a volatile mem ref, either return TARGET
3958 or return a reg copied *from* TARGET; ANSI requires this.
3960 Otherwise, if TEMP is not TARGET, return TEMP
3961 if it is constant (for efficiency),
3962 or if we really want the correct value. */
3963 if (!(target && REG_P (target)
3964 && REGNO (target) < FIRST_PSEUDO_REGISTER)
3965 && !(MEM_P (target) && MEM_VOLATILE_P (target))
3966 && ! rtx_equal_p (temp, target)
3967 && CONSTANT_P (temp))
3968 dont_return_target = 1;
3971 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3972 the same as that of TARGET, adjust the constant. This is needed, for
3973 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
3975 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
3976 && TREE_CODE (exp) != ERROR_MARK
3977 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3978 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3979 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
3981 /* If value was not generated in the target, store it there.
3982 Convert the value to TARGET's type first if necessary and emit the
3983 pending incrementations that have been queued when expanding EXP.
3984 Note that we cannot emit the whole queue blindly because this will
3985 effectively disable the POST_INC optimization later.
3987 If TEMP and TARGET compare equal according to rtx_equal_p, but
3988 one or both of them are volatile memory refs, we have to distinguish
3990 - expand_expr has used TARGET. In this case, we must not generate
3991 another copy. This can be detected by TARGET being equal according to ==.
3993 - expand_expr has not used TARGET - that means that the source just
3994 happens to have the same RTX form. Since temp will have been created
3995 by expand_expr, it will compare unequal according to == .
3996 We must generate a copy in this case, to reach the correct number
3997 of volatile memory references. */
3999 if ((! rtx_equal_p (temp, target)
4000 || (temp != target && (side_effects_p (temp)
4001 || side_effects_p (target))))
4002 && TREE_CODE (exp) != ERROR_MARK
4003 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4004 but TARGET is not a valid memory reference, TEMP will differ
4005 from TARGET although it is really the same location. */
4006 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4007 /* If there's nothing to copy, don't bother. Don't call expr_size
4008 unless necessary, because some front ends' (C++) expr_size hook
4009 aborts on objects that are not supposed to be bit-copied or bit-initialized. */
4011 && expr_size (exp) != const0_rtx)
4013 if (GET_MODE (temp) != GET_MODE (target)
4014 && GET_MODE (temp) != VOIDmode)
4016 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4017 if (dont_return_target)
4019 /* In this case, we will return TEMP,
4020 so make sure it has the proper mode.
4021 But don't forget to store the value into TARGET. */
4022 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4023 emit_move_insn (target, temp);
4026 convert_move (target, temp, unsignedp);
4029 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4031 /* Handle copying a string constant into an array. The string
4032 constant may be shorter than the array. So copy just the string's
4033 actual length, and clear the rest. First get the size of the data
4034 type of the string, which is actually the size of the target. */
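/* Concretely: for

     char buf[8] = "abc";

   the STRING_CST supplies 4 bytes (including the trailing NUL), so
   the code below copies those 4 bytes and clears the remaining 4.  */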
4035 rtx size = expr_size (exp);
4037 if (GET_CODE (size) == CONST_INT
4038 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4039 emit_block_move (target, temp, size,
4041 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4044 /* Compute the size of the data to copy from the string. */
4046 = size_binop (MIN_EXPR,
4047 make_tree (sizetype, size),
4048 size_int (TREE_STRING_LENGTH (exp)));
4050 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4052 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4055 /* Copy that much. */
4056 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4057 TYPE_UNSIGNED (sizetype));
4058 emit_block_move (target, temp, copy_size_rtx,
4060 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4062 /* Figure out how much is left in TARGET that we have to clear.
4063 Do all calculations in ptr_mode. */
4064 if (GET_CODE (copy_size_rtx) == CONST_INT)
4066 size = plus_constant (size, -INTVAL (copy_size_rtx));
4067 target = adjust_address (target, BLKmode,
4068 INTVAL (copy_size_rtx));
4072 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4073 copy_size_rtx, NULL_RTX, 0,
4076 #ifdef POINTERS_EXTEND_UNSIGNED
4077 if (GET_MODE (copy_size_rtx) != Pmode)
4078 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4079 TYPE_UNSIGNED (sizetype));
4082 target = offset_address (target, copy_size_rtx,
4083 highest_pow2_factor (copy_size));
4084 label = gen_label_rtx ();
4085 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4086 GET_MODE (size), 0, label);
4089 if (size != const0_rtx)
4090 clear_storage (target, size);
4096 /* Handle calls that return values in multiple non-contiguous locations.
4097 The Irix 6 ABI has examples of this. */
4098 else if (GET_CODE (target) == PARALLEL)
4099 emit_group_load (target, temp, TREE_TYPE (exp),
4100 int_size_in_bytes (TREE_TYPE (exp)));
4101 else if (GET_MODE (temp) == BLKmode)
4102 emit_block_move (target, temp, expr_size (exp),
4103 (want_value & 2
4104 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4107 temp = force_operand (temp, target);
4108 if (temp != target)
4109 emit_move_insn (target, temp);
4116 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4117 values and place the count in *P_NZ_ELTS. Discover how many scalar
4118 fields are set to non-constant values, placing the count in *P_NC_ELTS. */
4121 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4122 HOST_WIDE_INT *p_nc_elts)
4124 HOST_WIDE_INT nz_elts, nc_elts;
4130 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4132 tree value = TREE_VALUE (list);
4133 tree purpose = TREE_PURPOSE (list);
4137 if (TREE_CODE (purpose) == RANGE_EXPR)
4139 tree lo_index = TREE_OPERAND (purpose, 0);
4140 tree hi_index = TREE_OPERAND (purpose, 1);
4142 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4143 mult = (tree_low_cst (hi_index, 1)
4144 - tree_low_cst (lo_index, 1) + 1);
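	  /* E.g. an element with a RANGE_EXPR index of [2 ... 5] repeats its
	     value over 5 - 2 + 1 = 4 positions, so the scalar counts gathered
	     below are scaled by mult == 4. */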
4147 switch (TREE_CODE (value))
4151 HOST_WIDE_INT nz = 0, nc = 0;
4152 categorize_ctor_elements_1 (value, &nz, &nc);
4153 nz_elts += mult * nz;
4154 nc_elts += mult * nc;
4160 if (!initializer_zerop (value))
4165 nz_elts += mult * TREE_STRING_LENGTH (value);
4169 if (!initializer_zerop (TREE_REALPART (value)))
4171 if (!initializer_zerop (TREE_IMAGPART (value)))
4178 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4179 if (!initializer_zerop (TREE_VALUE (v)))
4186 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4192 *p_nz_elts += nz_elts;
4193 *p_nc_elts += nc_elts;
4197 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4198 HOST_WIDE_INT *p_nc_elts)
4202 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4205 /* Count the number of scalars in TYPE. Return -1 on overflow or
4206 if the size is variable. */
4208 static HOST_WIDE_INT
4209 count_type_elements (tree type)
4211 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4212 switch (TREE_CODE (type))
4216 tree telts = array_type_nelts (type);
4217 if (telts && host_integerp (telts, 1))
4219 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4220 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4223 else if (max / n > m)
4231 HOST_WIDE_INT n = 0, t;
4234 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4235 if (TREE_CODE (f) == FIELD_DECL)
4237 t = count_type_elements (TREE_TYPE (f));
4247 case QUAL_UNION_TYPE:
4249 /* Ho hum. How in the world do we guess here? Clearly it isn't
4250 right to count the fields. Guess based on the number of words. */
4251 HOST_WIDE_INT n = int_size_in_bytes (type);
4254 return n / UNITS_PER_WORD;
4261 return TYPE_VECTOR_SUBPARTS (type);
4270 case REFERENCE_TYPE:
4284 /* Return 1 if EXP contains mostly (3/4) zeros. */
4287 mostly_zeros_p (tree exp)
4289 if (TREE_CODE (exp) == CONSTRUCTOR)
4292 HOST_WIDE_INT nz_elts, nc_elts, elts;
4294 /* If there are no ranges of true bits, it is all zero. */
4295 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4296 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4298 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4299 elts = count_type_elements (TREE_TYPE (exp));
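      /* E.g. an aggregate of 16 scalar elements is "mostly zeros" only if
	 fewer than 16 / 4 == 4 of them are nonzero. */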
4301 return nz_elts < elts / 4;
4304 return initializer_zerop (exp);
4307 /* Helper function for store_constructor.
4308 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4309 TYPE is the type of the CONSTRUCTOR, not the element type.
4310 CLEARED is as for store_constructor.
4311 ALIAS_SET is the alias set to use for any stores.
4313 This provides a recursive shortcut back to store_constructor when it isn't
4314 necessary to go through store_field. This is so that we can pass through
4315 the cleared field to let store_constructor know that we may not have to
4316 clear a substructure if the outer structure has already been cleared. */
4319 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4320 HOST_WIDE_INT bitpos, enum machine_mode mode,
4321 tree exp, tree type, int cleared, int alias_set)
4323 if (TREE_CODE (exp) == CONSTRUCTOR
4324 /* We can only call store_constructor recursively if the size and
4325 bit position are on a byte boundary. */
4326 && bitpos % BITS_PER_UNIT == 0
4327 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4328 /* If we have a nonzero bitpos for a register target, then we just
4329 let store_field do the bitfield handling. This is unlikely to
4330 generate unnecessary clear instructions anyway. */
4331 && (bitpos == 0 || MEM_P (target)))
4334 target
4335 = adjust_address (target,
4336 GET_MODE (target) == BLKmode
4337 || 0 != (bitpos
4338 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4339 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4342 /* Update the alias set, if required. */
4343 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4344 && MEM_ALIAS_SET (target) != 0)
4346 target = copy_rtx (target);
4347 set_mem_alias_set (target, alias_set);
4350 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4353 store_field (target, bitsize, bitpos, mode, exp, type, alias_set);
4356 /* Store the value of constructor EXP into the rtx TARGET.
4357 TARGET is either a REG or a MEM; we know it cannot conflict, since
4358 safe_from_p has been called.
4359 CLEARED is true if TARGET is known to have been zero'd.
4360 SIZE is the number of bytes of TARGET we are allowed to modify: this
4361 may not be the same as the size of EXP if we are assigning to a field
4362 which has been packed to exclude padding bits. */
4365 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4367 tree type = TREE_TYPE (exp);
4368 #ifdef WORD_REGISTER_OPERATIONS
4369 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4372 switch (TREE_CODE (type))
4376 case QUAL_UNION_TYPE:
4380 /* If size is zero or the target is already cleared, do nothing. */
4381 if (size == 0 || cleared)
4383 /* We either clear the aggregate or indicate the value is dead. */
4384 else if ((TREE_CODE (type) == UNION_TYPE
4385 || TREE_CODE (type) == QUAL_UNION_TYPE)
4386 && ! CONSTRUCTOR_ELTS (exp))
4387 /* If the constructor is empty, clear the union. */
4389 clear_storage (target, expr_size (exp));
4393 /* If we are building a static constructor into a register,
4394 set the initial value as zero so we can fold the value into
4395 a constant. But if more than one register is involved,
4396 this probably loses. */
4397 else if (REG_P (target) && TREE_STATIC (exp)
4398 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4400 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4404 /* If the constructor has fewer fields than the structure or
4405 if we are initializing the structure to mostly zeros, clear
4406 the whole structure first. Don't do this if TARGET is a
4407 register whose mode size isn't equal to SIZE since
4408 clear_storage can't handle this case. */
4409 else if (size > 0
4410 && ((list_length (CONSTRUCTOR_ELTS (exp))
4411 != fields_length (type))
4412 || mostly_zeros_p (exp))
4413 && (!REG_P (target)
4414 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4415 == size)))
4416 {
4417 clear_storage (target, GEN_INT (size));
4418 cleared = 1;
4419 }
4421 if (! cleared)
4422 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4424 /* Store each element of the constructor into the
4425 corresponding field of TARGET. */
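      /* Illustrative case (not in the original comment): for
	 "struct { int a, b; } s = { 1, 2 };" this loop makes one
	 store_constructor_field call per (field, value) pair in
	 CONSTRUCTOR_ELTS. */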
4427 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4429 tree field = TREE_PURPOSE (elt);
4430 tree value = TREE_VALUE (elt);
4431 enum machine_mode mode;
4432 HOST_WIDE_INT bitsize;
4433 HOST_WIDE_INT bitpos = 0;
4435 rtx to_rtx = target;
4437 /* Just ignore missing fields. We cleared the whole
4438 structure, above, if any fields are missing. */
4442 if (cleared && initializer_zerop (value))
4445 if (host_integerp (DECL_SIZE (field), 1))
4446 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4450 mode = DECL_MODE (field);
4451 if (DECL_BIT_FIELD (field))
4454 offset = DECL_FIELD_OFFSET (field);
4455 if (host_integerp (offset, 0)
4456 && host_integerp (bit_position (field), 0))
4458 bitpos = int_bit_position (field);
4462 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4469 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4470 make_tree (TREE_TYPE (exp),
4473 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4474 gcc_assert (MEM_P (to_rtx));
4476 #ifdef POINTERS_EXTEND_UNSIGNED
4477 if (GET_MODE (offset_rtx) != Pmode)
4478 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4480 if (GET_MODE (offset_rtx) != ptr_mode)
4481 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4484 to_rtx = offset_address (to_rtx, offset_rtx,
4485 highest_pow2_factor (offset));
4488 #ifdef WORD_REGISTER_OPERATIONS
4489 /* If this initializes a field that is smaller than a
4490 word, at the start of a word, try to widen it to a full
4491 word. This special case allows us to output C++ member
4492 function initializations in a form that the optimizers
4493 can understand. */
4494 if (REG_P (target)
4495 && bitsize < BITS_PER_WORD
4496 && bitpos % BITS_PER_WORD == 0
4497 && GET_MODE_CLASS (mode) == MODE_INT
4498 && TREE_CODE (value) == INTEGER_CST
4500 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4502 tree type = TREE_TYPE (value);
4504 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4506 type = lang_hooks.types.type_for_size
4507 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4508 value = convert (type, value);
4511 if (BYTES_BIG_ENDIAN)
4513 = fold (build2 (LSHIFT_EXPR, type, value,
4514 build_int_cst (NULL_TREE,
4515 BITS_PER_WORD - bitsize)));
4516 bitsize = BITS_PER_WORD;
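		  /* E.g. on a 32-bit big-endian target, an 8-bit value 0x12
		     destined for the first byte of a word becomes
		     0x12 << 24 == 0x12000000 and is then stored with a
		     single full-word move. */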
4521 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4522 && DECL_NONADDRESSABLE_P (field))
4524 to_rtx = copy_rtx (to_rtx);
4525 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4528 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4529 value, type, cleared,
4530 get_alias_set (TREE_TYPE (field)));
4540 tree elttype = TREE_TYPE (type);
4542 HOST_WIDE_INT minelt = 0;
4543 HOST_WIDE_INT maxelt = 0;
4545 domain = TYPE_DOMAIN (type);
4546 const_bounds_p = (TYPE_MIN_VALUE (domain)
4547 && TYPE_MAX_VALUE (domain)
4548 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4549 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4551 /* If we have constant bounds for the range of the type, get them. */
4554 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4555 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4558 /* If the constructor has fewer elements than the array, clear
4559 the whole array first. Similarly if this is a static
4560 constructor of a non-BLKmode object. */
4563 else if (REG_P (target) && TREE_STATIC (exp))
4567 HOST_WIDE_INT count = 0, zero_count = 0;
4568 need_to_clear = ! const_bounds_p;
4570 /* This loop is a more accurate version of the loop in
4571 mostly_zeros_p (it handles RANGE_EXPR in an index). It
4572 is also needed to check for missing elements. */
4573 for (elt = CONSTRUCTOR_ELTS (exp);
4574 elt != NULL_TREE && ! need_to_clear;
4575 elt = TREE_CHAIN (elt))
4577 tree index = TREE_PURPOSE (elt);
4578 HOST_WIDE_INT this_node_count;
4580 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4582 tree lo_index = TREE_OPERAND (index, 0);
4583 tree hi_index = TREE_OPERAND (index, 1);
4585 if (! host_integerp (lo_index, 1)
4586 || ! host_integerp (hi_index, 1))
4592 this_node_count = (tree_low_cst (hi_index, 1)
4593 - tree_low_cst (lo_index, 1) + 1);
4596 this_node_count = 1;
4598 count += this_node_count;
4599 if (mostly_zeros_p (TREE_VALUE (elt)))
4600 zero_count += this_node_count;
4603 /* Clear the entire array first if there are any missing
4604 elements, or if the incidence of zero elements is >=
4605 75%. */
4606 if (! need_to_clear
4607 && (count < maxelt - minelt + 1
4608 || 4 * zero_count >= 3 * count))
4609 need_to_clear = 1;
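	      /* E.g. 100 elements of which 80 are zero: 4 * 80 >= 3 * 100,
		 so the whole array is cleared first and only the 20 nonzero
		 elements are stored individually below. */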
4612 if (need_to_clear && size > 0)
4613 {
4614 if (REG_P (target))
4615 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4616 else
4617 clear_storage (target, GEN_INT (size));
4618 cleared = 1;
4619 }
4621 if (!cleared && REG_P (target))
4622 /* Inform later passes that the old value is dead. */
4623 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4625 /* Store each element of the constructor into the
4626 corresponding element of TARGET, determined by counting the
4628 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4630 elt = TREE_CHAIN (elt), i++)
4632 enum machine_mode mode;
4633 HOST_WIDE_INT bitsize;
4634 HOST_WIDE_INT bitpos;
4636 tree value = TREE_VALUE (elt);
4637 tree index = TREE_PURPOSE (elt);
4638 rtx xtarget = target;
4640 if (cleared && initializer_zerop (value))
4643 unsignedp = TYPE_UNSIGNED (elttype);
4644 mode = TYPE_MODE (elttype);
4645 if (mode == BLKmode)
4646 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4647 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4650 bitsize = GET_MODE_BITSIZE (mode);
4652 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4654 tree lo_index = TREE_OPERAND (index, 0);
4655 tree hi_index = TREE_OPERAND (index, 1);
4656 rtx index_r, pos_rtx;
4657 HOST_WIDE_INT lo, hi, count;
4660 /* If the range is constant and "small", unroll the loop. */
4662 && host_integerp (lo_index, 0)
4663 && host_integerp (hi_index, 0)
4664 && (lo = tree_low_cst (lo_index, 0),
4665 hi = tree_low_cst (hi_index, 0),
4666 count = hi - lo + 1,
4669 || (host_integerp (TYPE_SIZE (elttype), 1)
4670 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4673 lo -= minelt; hi -= minelt;
4674 for (; lo <= hi; lo++)
4676 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4679 && !MEM_KEEP_ALIAS_SET_P (target)
4680 && TREE_CODE (type) == ARRAY_TYPE
4681 && TYPE_NONALIASED_COMPONENT (type))
4683 target = copy_rtx (target);
4684 MEM_KEEP_ALIAS_SET_P (target) = 1;
4687 store_constructor_field
4688 (target, bitsize, bitpos, mode, value, type, cleared,
4689 get_alias_set (elttype));
4694 rtx loop_start = gen_label_rtx ();
4695 rtx loop_end = gen_label_rtx ();
4698 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4699 unsignedp = TYPE_UNSIGNED (domain);
4701 index = build_decl (VAR_DECL, NULL_TREE, domain);
4704 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4706 SET_DECL_RTL (index, index_r);
4707 store_expr (lo_index, index_r, 0);
4709 /* Build the head of the loop. */
4710 do_pending_stack_adjust ();
4711 emit_label (loop_start);
4713 /* Assign value to element index. */
4714 position
4715 = convert (ssizetype,
4716 fold (build2 (MINUS_EXPR, TREE_TYPE (index),
4717 index, TYPE_MIN_VALUE (domain))));
4718 position = size_binop (MULT_EXPR, position,
4719 convert (ssizetype,
4720 TYPE_SIZE_UNIT (elttype)));
4722 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4723 xtarget = offset_address (target, pos_rtx,
4724 highest_pow2_factor (position));
4725 xtarget = adjust_address (xtarget, mode, 0);
4726 if (TREE_CODE (value) == CONSTRUCTOR)
4727 store_constructor (value, xtarget, cleared,
4728 bitsize / BITS_PER_UNIT);
4730 store_expr (value, xtarget, 0);
4732 /* Generate a conditional jump to exit the loop. */
4733 exit_cond = build2 (LT_EXPR, integer_type_node,
4734 index, hi_index);
4735 jumpif (exit_cond, loop_end);
4737 /* Update the loop counter, and jump to the head of
4739 expand_assignment (index,
4740 build2 (PLUS_EXPR, TREE_TYPE (index),
4741 index, integer_one_node));
4743 emit_jump (loop_start);
4745 /* Build the end of the loop. */
4746 emit_label (loop_end);
4749 else if ((index != 0 && ! host_integerp (index, 0))
4750 || ! host_integerp (TYPE_SIZE (elttype), 1))
4755 index = ssize_int (1);
4758 index = fold_convert (ssizetype,
4759 fold (build2 (MINUS_EXPR,
4762 TYPE_MIN_VALUE (domain))));
4764 position = size_binop (MULT_EXPR, index,
4765 convert (ssizetype,
4766 TYPE_SIZE_UNIT (elttype)));
4767 xtarget = offset_address (target,
4768 expand_expr (position, 0, VOIDmode, 0),
4769 highest_pow2_factor (position));
4770 xtarget = adjust_address (xtarget, mode, 0);
4771 store_expr (value, xtarget, 0);
4776 bitpos = ((tree_low_cst (index, 0) - minelt)
4777 * tree_low_cst (TYPE_SIZE (elttype), 1));
4779 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4781 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
4782 && TREE_CODE (type) == ARRAY_TYPE
4783 && TYPE_NONALIASED_COMPONENT (type))
4785 target = copy_rtx (target);
4786 MEM_KEEP_ALIAS_SET_P (target) = 1;
4788 store_constructor_field (target, bitsize, bitpos, mode, value,
4789 type, cleared, get_alias_set (elttype));
4801 tree elttype = TREE_TYPE (type);
4802 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
4803 enum machine_mode eltmode = TYPE_MODE (elttype);
4804 HOST_WIDE_INT bitsize;
4805 HOST_WIDE_INT bitpos;
4809 gcc_assert (eltmode != BLKmode);
4811 n_elts = TYPE_VECTOR_SUBPARTS (type);
4812 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4814 enum machine_mode mode = GET_MODE (target);
4816 icode = (int) vec_init_optab->handlers[mode].insn_code;
4817 if (icode != CODE_FOR_nothing)
4821 vector = alloca (n_elts * sizeof (rtx));
4822 for (i = 0; i < n_elts; i++)
4823 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4827 /* If the constructor has fewer elements than the vector,
4828 clear the whole vector first. Similarly if this is a static
4829 constructor of a non-BLKmode object. */
4832 else if (REG_P (target) && TREE_STATIC (exp))
4836 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
4838 for (elt = CONSTRUCTOR_ELTS (exp);
4840 elt = TREE_CHAIN (elt))
4842 int n_elts_here = tree_low_cst
4843 (int_const_binop (TRUNC_DIV_EXPR,
4844 TYPE_SIZE (TREE_TYPE (TREE_VALUE (elt))),
4845 TYPE_SIZE (elttype), 0), 1);
4847 count += n_elts_here;
4848 if (mostly_zeros_p (TREE_VALUE (elt)))
4849 zero_count += n_elts_here;
4852 /* Clear the entire vector first if there are any missing elements,
4853 or if the incidence of zero elements is >= 75%. */
4854 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
4857 if (need_to_clear && size > 0 && !vector)
4858 {
4859 if (REG_P (target))
4860 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4861 else
4862 clear_storage (target, GEN_INT (size));
4863 cleared = 1;
4864 }
4866 if (!cleared && REG_P (target))
4867 /* Inform later passes that the old value is dead. */
4868 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4870 /* Store each element of the constructor into the corresponding
4871 element of TARGET, determined by counting the elements. */
4872 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4874 elt = TREE_CHAIN (elt), i += bitsize / elt_size)
4876 tree value = TREE_VALUE (elt);
4877 tree index = TREE_PURPOSE (elt);
4878 HOST_WIDE_INT eltpos;
4880 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
4881 if (cleared && initializer_zerop (value))
4884 if (index != 0)
4885 eltpos = tree_low_cst (index, 1);
4886 else
4887 eltpos = i;
4891 /* Vector CONSTRUCTORs should only be built from smaller
4892 vectors in the case of BLKmode vectors. */
4893 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
4894 vector[eltpos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4898 enum machine_mode value_mode =
4899 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
4900 ? TYPE_MODE (TREE_TYPE (value))
4901 : eltmode;
4902 bitpos = eltpos * elt_size;
4903 store_constructor_field (target, bitsize, bitpos,
4904 value_mode, value, type,
4905 cleared, get_alias_set (elttype));
4910 emit_insn (GEN_FCN (icode)
4911 (target,
4912 gen_rtx_PARALLEL (GET_MODE (target),
4913 gen_rtvec_v (n_elts, vector))));
4917 /* Set constructor assignments. */
4920 tree elt = CONSTRUCTOR_ELTS (exp);
4921 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4922 tree domain = TYPE_DOMAIN (type);
4923 tree domain_min, domain_max, bitlength;
4925 /* The default implementation strategy is to extract the
4926 constant parts of the constructor, use that to initialize
4927 the target, and then "or" in whatever non-constant ranges
4928 we need in addition.
4930 If a large set is all zero or all ones, it is probably
4931 better to set it using memset. Also, if a large set has
4932 just a single range, it may be better to first clear the
4933 whole set (using memset) and then set the bits we want. */
4936 /* Check for all zeros. */
4937 if (elt == NULL_TREE && size > 0)
4940 clear_storage (target, GEN_INT (size));
4944 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4945 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4946 bitlength = size_binop (PLUS_EXPR,
4947 size_diffop (domain_max, domain_min),
4950 nbits = tree_low_cst (bitlength, 1);
4952 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets
4953 that are "complicated" (more than one range), initialize
4954 (the constant parts) by copying from a constant. */
4955 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4956 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4958 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4959 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4960 char *bit_buffer = alloca (nbits);
4961 HOST_WIDE_INT word = 0;
4962 unsigned int bit_pos = 0;
4963 unsigned int ibit = 0;
4964 unsigned int offset = 0; /* In bytes from beginning of set. */
4966 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4969 if (bit_buffer[ibit])
4971 if (BYTES_BIG_ENDIAN)
4972 word |= (1 << (set_word_size - 1 - bit_pos));
4974 word |= 1 << bit_pos;
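		  /* E.g. with a 32-bit set word, member bit 3 maps to
		     1 << 28 on a big-endian target and to 1 << 3 on a
		     little-endian one. */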
4978 if (bit_pos >= set_word_size || ibit == nbits)
4980 if (word != 0 || ! cleared)
4982 rtx datum = gen_int_mode (word, mode);
4985 /* The assumption here is that it is safe to
4986 use XEXP if the set is multi-word, but not
4987 if it's single-word. */
4988 if (MEM_P (target))
4989 to_rtx = adjust_address (target, mode, offset);
4990 else
4991 {
4992 gcc_assert (!offset);
4993 to_rtx = target;
4994 }
4995 emit_move_insn (to_rtx, datum);
5002 offset += set_word_size / BITS_PER_UNIT;
5007 /* Don't bother clearing storage if the set is all ones. */
5008 if (TREE_CHAIN (elt) != NULL_TREE
5009 || (TREE_PURPOSE (elt) == NULL_TREE
5011 : ( ! host_integerp (TREE_VALUE (elt), 0)
5012 || ! host_integerp (TREE_PURPOSE (elt), 0)
5013 || (tree_low_cst (TREE_VALUE (elt), 0)
5014 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5015 != (HOST_WIDE_INT) nbits))))
5016 clear_storage (target, expr_size (exp));
5018 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5020 /* Start of range of element or NULL. */
5021 tree startbit = TREE_PURPOSE (elt);
5022 /* End of range of element, or element value. */
5023 tree endbit = TREE_VALUE (elt);
5024 HOST_WIDE_INT startb, endb;
5025 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5027 bitlength_rtx = expand_expr (bitlength,
5028 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5030 /* Handle non-range tuple element like [ expr ]. */
5031 if (startbit == NULL_TREE)
5033 startbit = save_expr (endbit);
5037 startbit = convert (sizetype, startbit);
5038 endbit = convert (sizetype, endbit);
5039 if (! integer_zerop (domain_min))
5041 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5042 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5044 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5045 EXPAND_CONST_ADDRESS);
5046 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5047 EXPAND_CONST_ADDRESS);
5053 ((build_qualified_type (lang_hooks.types.type_for_mode
5054 (GET_MODE (target), 0),
5057 emit_move_insn (targetx, target);
5062 gcc_assert (MEM_P (target));
5066 /* Optimization: If startbit and endbit are constants divisible
5067 by BITS_PER_UNIT, call memset instead. */
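	  /* E.g. a range covering bits 8 through 31 gives startb == 8 and
	     endb == 32, both byte-aligned, so the three bytes at offset 1
	     are set with a single memset of -1. */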
5068 if (TREE_CODE (startbit) == INTEGER_CST
5069 && TREE_CODE (endbit) == INTEGER_CST
5070 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5071 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5073 emit_library_call (memset_libfunc, LCT_NORMAL,
5075 plus_constant (XEXP (targetx, 0),
5076 startb / BITS_PER_UNIT),
5078 constm1_rtx, TYPE_MODE (integer_type_node),
5079 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5080 TYPE_MODE (sizetype));
5083 emit_library_call (setbits_libfunc, LCT_NORMAL,
5084 VOIDmode, 4, XEXP (targetx, 0),
5085 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5086 startbit_rtx, TYPE_MODE (sizetype),
5087 endbit_rtx, TYPE_MODE (sizetype));
5090 emit_move_insn (target, targetx);
5099 /* Store the value of EXP (an expression tree)
5100 into a subfield of TARGET which has mode MODE and occupies
5101 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5102 If MODE is VOIDmode, it means that we are storing into a bit-field.
5104 Always return const0_rtx unless we have something particular to
5105 return.
5107 TYPE is the type of the underlying object,
5109 ALIAS_SET is the alias set for the destination. This value will
5110 (in general) be different from that for TARGET, since TARGET is a
5111 reference to the containing structure. */
5114 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5115 enum machine_mode mode, tree exp, tree type, int alias_set)
5117 HOST_WIDE_INT width_mask = 0;
5119 if (TREE_CODE (exp) == ERROR_MARK)
5122 /* If we have nothing to store, do nothing unless the expression has
5123 side-effects. */
5124 if (bitsize == 0)
5125 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5126 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5127 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
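  /* E.g. for a 3-bit field, width_mask == ((HOST_WIDE_INT) 1 << 3) - 1
     == 7, covering exactly the bits to be stored. */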
5129 /* If we are storing into an unaligned field of an aligned union that is
5130 in a register, we may have the mode of TARGET being an integer mode but
5131 MODE == BLKmode. In that case, get an aligned object whose size and
5132 alignment are the same as TARGET and store TARGET into it (we can avoid
5133 the store if the field being stored is the entire width of TARGET). Then
5134 call ourselves recursively to store the field into a BLKmode version of
5135 that object. Finally, load from the object into TARGET. This is not
5136 very efficient in general, but should only be slightly more expensive
5137 than the otherwise-required unaligned accesses. Perhaps this can be
5138 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5139 twice, once with emit_move_insn and once via store_field. */
5142 && (REG_P (target) || GET_CODE (target) == SUBREG))
5144 rtx object = assign_temp (type, 0, 1, 1);
5145 rtx blk_object = adjust_address (object, BLKmode, 0);
5147 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5148 emit_move_insn (object, target);
5150 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set);
5152 emit_move_insn (target, object);
5154 /* We want to return the BLKmode version of the data. */
5155 return blk_object;
5156 }
5158 if (GET_CODE (target) == CONCAT)
5160 /* We're storing into a struct containing a single __complex. */
5162 gcc_assert (!bitpos);
5163 return store_expr (exp, target, 0);
5166 /* If the structure is in a register or if the component
5167 is a bit field, we cannot use addressing to access it.
5168 Use bit-field techniques or SUBREG to store in it. */
5170 if (mode == VOIDmode
5171 || (mode != BLKmode && ! direct_store[(int) mode]
5172 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5173 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5175 || GET_CODE (target) == SUBREG
5176 /* If the field isn't aligned enough to store as an ordinary memref,
5177 store it as a bit field. */
5179 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5180 || bitpos % GET_MODE_ALIGNMENT (mode))
5181 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5182 || (bitpos % BITS_PER_UNIT != 0)))
5183 /* If the RHS and field are a constant size and the size of the
5184 RHS isn't the same size as the bitfield, we must use bitfield
5187 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5188 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5190 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5192 /* If BITSIZE is narrower than the size of the type of EXP
5193 we will be narrowing TEMP. Normally, what's wanted are the
5194 low-order bits. However, if EXP's type is a record and this is
5195 a big-endian machine, we want the upper BITSIZE bits.
5196 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5197 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5198 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5199 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5200 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5204 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5206 if (mode != VOIDmode && mode != BLKmode
5207 && mode != TYPE_MODE (TREE_TYPE (exp)))
5208 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5210 /* If the modes of TARGET and TEMP are both BLKmode, both
5211 must be in memory and BITPOS must be aligned on a byte
5212 boundary. If so, we simply do a block copy. */
5213 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5215 gcc_assert (MEM_P (target) && MEM_P (temp)
5216 && !(bitpos % BITS_PER_UNIT));
5218 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5219 emit_block_move (target, temp,
5220 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5221 / BITS_PER_UNIT),
5222 BLOCK_OP_NORMAL);
5223 return const0_rtx;
5224 }
5227 /* Store the value in the bitfield. */
5228 store_bit_field (target, bitsize, bitpos, mode, temp);
5234 /* Now build a reference to just the desired component. */
5235 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5237 if (to_rtx == target)
5238 to_rtx = copy_rtx (to_rtx);
5240 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5241 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5242 set_mem_alias_set (to_rtx, alias_set);
5244 return store_expr (exp, to_rtx, 0);
5248 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5249 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5250 codes and find the ultimate containing object, which we return.
5252 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5253 bit position, and *PUNSIGNEDP to the signedness of the field.
5254 If the position of the field is variable, we store a tree
5255 giving the variable offset (in units) in *POFFSET.
5256 This offset is in addition to the bit position.
5257 If the position is not variable, we store 0 in *POFFSET.
5259 If any of the extraction expressions is volatile,
5260 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5262 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5263 is a mode that can be used to access the field. In that case, *PBITSIZE
5264 is redundant.
5266 If the field describes a variable-sized object, *PMODE is set to
5267 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5268 this case, but the address of the object can be found. */
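/* Illustrative example, not part of the original comment: for the C
   lvalue "s.a[i].b" this function returns the innermost object "s",
   folds the constant bit offsets of "a" and "b" into *PBITPOS, and
   returns the i-dependent byte offset (i times the element size) in
   *POFFSET. */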
5271 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5272 HOST_WIDE_INT *pbitpos, tree *poffset,
5273 enum machine_mode *pmode, int *punsignedp,
5277 enum machine_mode mode = VOIDmode;
5278 tree offset = size_zero_node;
5279 tree bit_offset = bitsize_zero_node;
5282 /* First get the mode, signedness, and size. We do this from just the
5283 outermost expression. */
5284 if (TREE_CODE (exp) == COMPONENT_REF)
5286 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5287 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5288 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5290 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5292 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5294 size_tree = TREE_OPERAND (exp, 1);
5295 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5299 mode = TYPE_MODE (TREE_TYPE (exp));
5300 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5302 if (mode == BLKmode)
5303 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5305 *pbitsize = GET_MODE_BITSIZE (mode);
5310 if (! host_integerp (size_tree, 1))
5311 mode = BLKmode, *pbitsize = -1;
5313 *pbitsize = tree_low_cst (size_tree, 1);
5316 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5317 and find the ultimate containing object. */
5320 switch (TREE_CODE (exp))
5323 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5324 TREE_OPERAND (exp, 2));
5329 tree field = TREE_OPERAND (exp, 1);
5330 tree this_offset = component_ref_field_offset (exp);
5332 /* If this field hasn't been filled in yet, don't go past it.
5333 This should only happen when folding expressions made during
5334 type construction. */
5335 if (this_offset == 0)
5338 offset = size_binop (PLUS_EXPR, offset, this_offset);
5339 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5340 DECL_FIELD_BIT_OFFSET (field));
5342 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5347 case ARRAY_RANGE_REF:
5349 tree index = TREE_OPERAND (exp, 1);
5350 tree low_bound = array_ref_low_bound (exp);
5351 tree unit_size = array_ref_element_size (exp);
5353 /* We assume all arrays have sizes that are a multiple of a byte.
5354 First subtract the lower bound, if any, in the type of the
5355 index, then convert to sizetype and multiply by the size of
5356 the array element. */
5357 if (! integer_zerop (low_bound))
5358 index = fold (build2 (MINUS_EXPR, TREE_TYPE (index),
5361 offset = size_binop (PLUS_EXPR, offset,
5362 size_binop (MULT_EXPR,
5363 convert (sizetype, index),
5369 bit_offset = bitsize_zero_node;
5373 bit_offset = build_int_cst (bitsizetype, *pbitsize);
5376 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5377 conversions that don't change the mode, and all view conversions
5378 except those that need to "step up" the alignment. */
5380 case NON_LVALUE_EXPR:
5385 if (TYPE_MODE (TREE_TYPE (exp))
5386 != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5390 case VIEW_CONVERT_EXPR:
5391 if ((TYPE_ALIGN (TREE_TYPE (exp))
5392 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5394 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5395 < BIGGEST_ALIGNMENT)
5396 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5397 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5405 /* If any reference in the chain is volatile, the effect is volatile. */
5406 if (TREE_THIS_VOLATILE (exp))
5409 exp = TREE_OPERAND (exp, 0);
5413 /* If OFFSET is constant, see if we can return the whole thing as a
5414 constant bit position. Otherwise, split it up. */
5415 if (host_integerp (offset, 0)
5416 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5418 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5419 && host_integerp (tem, 0))
5420 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5422 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5428 /* Return a tree of sizetype representing the size, in bytes, of the element
5429 of EXP, an ARRAY_REF. */
5432 array_ref_element_size (tree exp)
5434 tree aligned_size = TREE_OPERAND (exp, 3);
5435 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5437 /* If a size was specified in the ARRAY_REF, it's the size measured
5438 in alignment units of the element type. So multiply by that value. */
5441 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5442 sizetype from another type of the same width and signedness. */
5443 if (TREE_TYPE (aligned_size) != sizetype)
5444 aligned_size = fold_convert (sizetype, aligned_size);
5445 return size_binop (MULT_EXPR, aligned_size,
5446 size_int (TYPE_ALIGN_UNIT (elmt_type)));
5449 /* Otherwise, take the size from that of the element type. Substitute
5450 any PLACEHOLDER_EXPR that we have. */
5452 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5455 /* Return a tree representing the lower bound of the array mentioned in
5456 EXP, an ARRAY_REF. */
5459 array_ref_low_bound (tree exp)
5461 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5463 /* If a lower bound is specified in EXP, use it. */
5464 if (TREE_OPERAND (exp, 2))
5465 return TREE_OPERAND (exp, 2);
5467 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5468 substituting for a PLACEHOLDER_EXPR as needed. */
5469 if (domain_type && TYPE_MIN_VALUE (domain_type))
5470 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5472 /* Otherwise, return a zero of the appropriate type. */
5473 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
5476 /* Return a tree representing the upper bound of the array mentioned in
5477 EXP, an ARRAY_REF. */
5480 array_ref_up_bound (tree exp)
5482 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5484 /* If there is a domain type and it has an upper bound, use it, substituting
5485 for a PLACEHOLDER_EXPR as needed. */
5486 if (domain_type && TYPE_MAX_VALUE (domain_type))
5487 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5489 /* Otherwise fail. */
5493 /* Return a tree representing the offset, in bytes, of the field referenced
5494 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5497 component_ref_field_offset (tree exp)
5499 tree aligned_offset = TREE_OPERAND (exp, 2);
5500 tree field = TREE_OPERAND (exp, 1);
5502 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5503 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5504 value. */
5505 if (aligned_offset)
5506 {
5507 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
5508 sizetype from another type of the same width and signedness. */
5509 if (TREE_TYPE (aligned_offset) != sizetype)
5510 aligned_offset = fold_convert (sizetype, aligned_offset);
5511 return size_binop (MULT_EXPR, aligned_offset,
5512 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5515 /* Otherwise, take the offset from that of the field. Substitute
5516 any PLACEHOLDER_EXPR that we have. */
5518 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5521 /* Return 1 if T is an expression that get_inner_reference handles. */
5524 handled_component_p (tree t)
5526 switch (TREE_CODE (t))
5531 case ARRAY_RANGE_REF:
5532 case NON_LVALUE_EXPR:
5533 case VIEW_CONVERT_EXPR:
5538 /* ??? Sure they are handled, but get_inner_reference may return
5539 a different PBITSIZE, depending upon whether the expression is
5540 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5543 return (TYPE_MODE (TREE_TYPE (t))
5544 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5551 /* Given an rtx VALUE that may contain additions and multiplications, return
5552 an equivalent value that just refers to a register, memory, or constant.
5553 This is done by generating instructions to perform the arithmetic and
5554 returning a pseudo-register containing the value.
5556 The returned value may be a REG, SUBREG, MEM or constant. */
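/* Illustrative example, not from the original comment: forcing
   (plus (reg A) (mult (reg B) (const_int 4))) emits the multiplication
   (or an equivalent shift) and the addition as real insns and returns
   a pseudo register holding the sum. */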
5559 force_operand (rtx value, rtx target)
5562 /* Use subtarget as the target for operand 0 of a binary operation. */
5563 rtx subtarget = get_subtarget (target);
5564 enum rtx_code code = GET_CODE (value);
5566 /* Check for subreg applied to an expression produced by loop optimizer. */
5568 && !REG_P (SUBREG_REG (value))
5569 && !MEM_P (SUBREG_REG (value)))
5571 value = simplify_gen_subreg (GET_MODE (value),
5572 force_reg (GET_MODE (SUBREG_REG (value)),
5573 force_operand (SUBREG_REG (value),
5575 GET_MODE (SUBREG_REG (value)),
5576 SUBREG_BYTE (value));
5577 code = GET_CODE (value);
5580 /* Check for a PIC address load. */
5581 if ((code == PLUS || code == MINUS)
5582 && XEXP (value, 0) == pic_offset_table_rtx
5583 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5584 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5585 || GET_CODE (XEXP (value, 1)) == CONST))
5588 subtarget = gen_reg_rtx (GET_MODE (value));
5589 emit_move_insn (subtarget, value);
5593 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5596 target = gen_reg_rtx (GET_MODE (value));
5597 convert_move (target, force_operand (XEXP (value, 0), NULL),
5598 code == ZERO_EXTEND);
5602 if (ARITHMETIC_P (value))
5604 op2 = XEXP (value, 1);
5605 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5607 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5610 op2 = negate_rtx (GET_MODE (value), op2);
5613 /* Check for an addition with OP2 a constant integer and our first
5614 operand a PLUS of a virtual register and something else. In that
5615 case, we want to emit the sum of the virtual register and the
5616 constant first and then add the other value. This allows virtual
5617 register instantiation to simply modify the constant rather than
5618 creating another one around this addition. */
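      /* E.g. for (plus (plus (reg virtual-stack-vars) (reg R))
	 (const_int 8)) we first emit virtual-stack-vars + 8, which
	 instantiation can fold into a single frame offset, and only
	 then add R. */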
5619 if (code == PLUS && GET_CODE (op2) == CONST_INT
5620 && GET_CODE (XEXP (value, 0)) == PLUS
5621 && REG_P (XEXP (XEXP (value, 0), 0))
5622 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5623 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5625 rtx temp = expand_simple_binop (GET_MODE (value), code,
5626 XEXP (XEXP (value, 0), 0), op2,
5627 subtarget, 0, OPTAB_LIB_WIDEN);
5628 return expand_simple_binop (GET_MODE (value), code, temp,
5629 force_operand (XEXP (XEXP (value,
5631 target, 0, OPTAB_LIB_WIDEN);
5634 op1 = force_operand (XEXP (value, 0), subtarget);
5635 op2 = force_operand (op2, NULL_RTX);
5639 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5641 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5642 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5643 target, 1, OPTAB_LIB_WIDEN);
5645 return expand_divmod (0,
5646 FLOAT_MODE_P (GET_MODE (value))
5647 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5648 GET_MODE (value), op1, op2, target, 0);
5651 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5655 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5659 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5663 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5664 target, 0, OPTAB_LIB_WIDEN);
5667 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5668 target, 1, OPTAB_LIB_WIDEN);
5671 if (UNARY_P (value))
5673 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5674 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5677 #ifdef INSN_SCHEDULING
5678 /* On machines that have insn scheduling, we want all memory references to be
5679 explicit, so we need to deal with such paradoxical SUBREGs. */
5680 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5681 && (GET_MODE_SIZE (GET_MODE (value))
5682 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5684 = simplify_gen_subreg (GET_MODE (value),
5685 force_reg (GET_MODE (SUBREG_REG (value)),
5686 force_operand (SUBREG_REG (value),
5688 GET_MODE (SUBREG_REG (value)),
5689 SUBREG_BYTE (value));
5695 /* Subroutine of expand_expr: return nonzero iff there is no way that
5696 EXP can reference X, which is being modified. TOP_P is nonzero if this
5697 call is going to be used to determine whether we need a temporary
5698 for EXP, as opposed to a recursive call to this function.
5700 It is always safe for this routine to return zero since it merely
5701 searches for optimization opportunities. */
5704 safe_from_p (rtx x, tree exp, int top_p)
5710 /* If EXP has varying size, we MUST use a target since we currently
5711 have no way of allocating temporaries of variable size
5712 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5713 So we assume here that something at a higher level has prevented a
5714 clash. This is somewhat bogus, but the best we can do. Only
5715 do this when X is BLKmode and when we are at the top level. */
5716 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5717 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5718 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5719 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5720 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5722 && GET_MODE (x) == BLKmode)
5723 /* If X is in the outgoing argument area, it is always safe. */
5725 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5726 || (GET_CODE (XEXP (x, 0)) == PLUS
5727 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5730 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5731 find the underlying pseudo. */
5732 if (GET_CODE (x) == SUBREG)
5733 {
5734 x = SUBREG_REG (x);
5735 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5736 return 0;
5737 }
5739 /* Now look at our tree code and possibly recurse. */
5740 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5742 case tcc_declaration:
5743 exp_rtl = DECL_RTL_IF_SET (exp);
5749 case tcc_exceptional:
5750 if (TREE_CODE (exp) == TREE_LIST)
5754 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5756 exp = TREE_CHAIN (exp);
5759 if (TREE_CODE (exp) != TREE_LIST)
5760 return safe_from_p (x, exp, 0);
5763 else if (TREE_CODE (exp) == ERROR_MARK)
5764 return 1; /* An already-visited SAVE_EXPR? */
5769 /* The only case we look at here is the DECL_INITIAL inside a
5771 return (TREE_CODE (exp) != DECL_EXPR
5772 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5773 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5774 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5777 case tcc_comparison:
5778 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5783 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5785 case tcc_expression:
5787 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5788 the expression. If it is set, we conflict iff we are that rtx or
5789 both are in memory. Otherwise, we check all operands of the
5790 expression recursively. */
5792 switch (TREE_CODE (exp))
5795 /* If the operand is static or we are static, we can't conflict.
5796 Likewise if we don't conflict with the operand at all. */
5797 if (staticp (TREE_OPERAND (exp, 0))
5798 || TREE_STATIC (exp)
5799 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5802 /* Otherwise, the only way this can conflict is if we are taking
5803 the address of a DECL and that address is part of X, which is
5804 very rare. */
5805 exp = TREE_OPERAND (exp, 0);
5808 if (!DECL_RTL_SET_P (exp)
5809 || !MEM_P (DECL_RTL (exp)))
5812 exp_rtl = XEXP (DECL_RTL (exp), 0);
5816 case MISALIGNED_INDIRECT_REF:
5817 case ALIGN_INDIRECT_REF:
5820 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5821 get_alias_set (exp)))
5826 /* Assume that the call will clobber all hard registers and
5828 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5833 case WITH_CLEANUP_EXPR:
5834 case CLEANUP_POINT_EXPR:
5835 /* Lowered by gimplify.c. */
5839 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5845 /* If we have an rtx, we do not need to scan our operands. */
5849 nops = first_rtl_op (TREE_CODE (exp));
5850 for (i = 0; i < nops; i++)
5851 if (TREE_OPERAND (exp, i) != 0
5852 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5855 /* If this is a language-specific tree code, it may require
5856 special handling. */
5857 if ((unsigned int) TREE_CODE (exp)
5858 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5859 && !lang_hooks.safe_from_p (x, exp))
5864 /* Should never get a type here. */
5868 /* If we have an rtl, find any enclosed object. Then see if we conflict
5872 if (GET_CODE (exp_rtl) == SUBREG)
5873 {
5874 exp_rtl = SUBREG_REG (exp_rtl);
5875 if (REG_P (exp_rtl)
5876 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5877 return 0;
5878 }
5880 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5881 are memory and they conflict. */
5882 return ! (rtx_equal_p (x, exp_rtl)
5883 || (MEM_P (x) && MEM_P (exp_rtl)
5884 && true_dependence (exp_rtl, VOIDmode, x,
5885 rtx_addr_varies_p)));
5888 /* If we reach here, it is safe. */
5893 /* Return the highest power of two that EXP is known to be a multiple of.
5894 This is used in updating alignment of MEMs in array references. */
5896 static unsigned HOST_WIDE_INT
5897 highest_pow2_factor (tree exp)
5899 unsigned HOST_WIDE_INT c0, c1;
5901 switch (TREE_CODE (exp))
5904 /* We can find the lowest bit that's a one. If the low
5905 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5906 We need to handle this case since we can find it in a COND_EXPR,
5907 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5908 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5909 later ICE. */
5910 if (TREE_CONSTANT_OVERFLOW (exp))
5911 return BIGGEST_ALIGNMENT;
5914 /* Note: tree_low_cst is intentionally not used here,
5915 we don't care about the upper bits. */
5916 c0 = TREE_INT_CST_LOW (exp);
5917 c0 &= -c0;
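      /* E.g. for the constant 24 (binary 11000) c0 becomes 8, the
	 largest power of two known to divide the value. */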
5918 return c0 ? c0 : BIGGEST_ALIGNMENT;
5922 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5923 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5924 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5925 return MIN (c0, c1);
5928 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5929 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5932 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5934 if (integer_pow2p (TREE_OPERAND (exp, 1))
5935 && host_integerp (TREE_OPERAND (exp, 1), 1))
5937 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5938 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
5939 return MAX (1, c0 / c1);
5943 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
5945 return highest_pow2_factor (TREE_OPERAND (exp, 0));
5948 return highest_pow2_factor (TREE_OPERAND (exp, 1));
5951 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5952 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
5953 return MIN (c0, c1);
5962 /* Similar, except that the alignment requirements of TARGET are
5963 taken into account. Assume it is at least as aligned as its
5964 type, unless it is a COMPONENT_REF in which case the layout of
5965 the structure gives the alignment. */
5967 static unsigned HOST_WIDE_INT
5968 highest_pow2_factor_for_target (tree target, tree exp)
5970 unsigned HOST_WIDE_INT target_align, factor;
5972 factor = highest_pow2_factor (exp);
5973 if (TREE_CODE (target) == COMPONENT_REF)
5974 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
5976 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
5977 return MAX (factor, target_align);
5980 /* Expands variable VAR. */
5983 expand_var (tree var)
5985 if (DECL_EXTERNAL (var))
5988 if (TREE_STATIC (var))
5989 /* If this is an inlined copy of a static local variable,
5990 look up the original decl. */
5991 var = DECL_ORIGIN (var);
5993 if (TREE_STATIC (var)
5994 ? !TREE_ASM_WRITTEN (var)
5995 : !DECL_RTL_SET_P (var))
5997 if (TREE_CODE (var) == VAR_DECL && DECL_VALUE_EXPR (var))
5998 /* Should be ignored. */;
5999 else if (lang_hooks.expand_decl (var))
6000 /* OK. */;
6001 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6002 expand_decl (var);
6003 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6004 rest_of_decl_compilation (var, 0, 0);
6005 else
6006 /* No expansion needed. */
6007 gcc_assert (TREE_CODE (var) == TYPE_DECL
6008 || TREE_CODE (var) == CONST_DECL
6009 || TREE_CODE (var) == FUNCTION_DECL
6010 || TREE_CODE (var) == LABEL_DECL);
6014 /* Subroutine of expand_expr. Expand the two operands of a binary
6015 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6016 The value may be stored in TARGET if TARGET is nonzero. The
6017 MODIFIER argument is as documented by expand_expr. */
6020 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6021 enum expand_modifier modifier)
6023 if (! safe_from_p (target, exp1, 1))
6024 target = 0;
6025 if (operand_equal_p (exp0, exp1, 0))
6027 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6028 *op1 = copy_rtx (*op0);
6032 /* If we need to preserve evaluation order, copy exp0 into its own
6033 temporary variable so that it can't be clobbered by exp1. */
6034 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6035 exp0 = save_expr (exp0);
6036 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6037 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6042 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6043 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6046 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6047 enum expand_modifier modifier)
6049 rtx result, subtarget;
6051 HOST_WIDE_INT bitsize, bitpos;
6052 int volatilep, unsignedp;
6053 enum machine_mode mode1;
6055 /* If we are taking the address of a constant and are at the top level,
6056 we have to use output_constant_def since we can't call force_const_mem
6058 /* ??? This should be considered a front-end bug. We should not be
6059 generating ADDR_EXPR of something that isn't an LVALUE. The only
6060 exception here is STRING_CST. */
6061 if (TREE_CODE (exp) == CONSTRUCTOR
6062 || CONSTANT_CLASS_P (exp))
6063 return XEXP (output_constant_def (exp, 0), 0);
6065 /* Everything must be something allowed by is_gimple_addressable. */
6066 switch (TREE_CODE (exp))
6069 /* This case will happen via recursion for &a->b. */
6070 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, EXPAND_NORMAL);
6073 /* Recurse and make the output_constant_def clause above handle this. */
6074 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6078 /* The real part of the complex number is always first, therefore
6079 the address is the same as the address of the parent object. */
6082 inner = TREE_OPERAND (exp, 0);
6086 /* The imaginary part of the complex number is always second.
6087 The expression is therefore always offset by the size of the
6088 scalar type. */
6090 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6091 inner = TREE_OPERAND (exp, 0);
6095 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6096 expand_expr, as that can have various side effects; LABEL_DECLs for
6097 example, may not have their DECL_RTL set yet. Assume language
6098 specific tree nodes can be expanded in some interesting way. */
6100 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6102 result = expand_expr (exp, target, tmode,
6103 modifier == EXPAND_INITIALIZER
6104 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6106 /* If the DECL isn't in memory, then the DECL wasn't properly
6107 marked TREE_ADDRESSABLE, which will be either a front-end
6108 or a tree optimizer bug. */
6109 gcc_assert (GET_CODE (result) == MEM);
6110 result = XEXP (result, 0);
6112 /* ??? Is this needed anymore? */
6113 if (DECL_P (exp) && TREE_USED (exp) == 0)
6115 assemble_external (exp);
6116 TREE_USED (exp) = 1;
6119 if (modifier != EXPAND_INITIALIZER
6120 && modifier != EXPAND_CONST_ADDRESS)
6121 result = force_operand (result, target);
6125 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6126 &mode1, &unsignedp, &volatilep);
6130 /* We must have made progress. */
6131 gcc_assert (inner != exp);
6133 subtarget = offset || bitpos ? NULL_RTX : target;
6134 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6140 if (modifier != EXPAND_NORMAL)
6141 result = force_operand (result, NULL);
6142 tmp = expand_expr (offset, NULL, tmode, EXPAND_NORMAL);
6144 result = convert_memory_address (tmode, result);
6145 tmp = convert_memory_address (tmode, tmp);
6147 if (modifier == EXPAND_SUM)
6148 result = gen_rtx_PLUS (tmode, result, tmp);
6151 subtarget = bitpos ? NULL_RTX : target;
6152 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6153 1, OPTAB_LIB_WIDEN);
6159 /* Someone beforehand should have rejected taking the address
6160 of such an object. */
6161 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6163 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6164 if (modifier < EXPAND_SUM)
6165 result = force_operand (result, target);
6171 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6172 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6175 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6176 enum expand_modifier modifier)
6178 enum machine_mode rmode;
6181 /* Target mode of VOIDmode says "whatever's natural". */
6182 if (tmode == VOIDmode)
6183 tmode = TYPE_MODE (TREE_TYPE (exp));
6185 /* We can get called with some Weird Things if the user does silliness
6186 like "(short) &a". In that case, convert_memory_address won't do
6187 the right thing, so ignore the given target mode. */
6188 if (tmode != Pmode && tmode != ptr_mode)
6189 tmode = Pmode;
6191 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6194 /* Despite expand_expr claims concerning ignoring TMODE when not
6195 strictly convenient, stuff breaks if we don't honor it. Note
6196 that combined with the above, we only do this for pointer modes. */
6197 rmode = GET_MODE (result);
6198 if (rmode == VOIDmode)
6199 rmode = tmode;
6200 if (rmode != tmode)
6201 result = convert_memory_address (tmode, result);
6207 /* expand_expr: generate code for computing expression EXP.
6208 An rtx for the computed value is returned. The value is never null.
6209 In the case of a void EXP, const0_rtx is returned.
6211 The value may be stored in TARGET if TARGET is nonzero.
6212 TARGET is just a suggestion; callers must assume that
6213 the rtx returned may not be the same as TARGET.
6215 If TARGET is CONST0_RTX, it means that the value will be ignored.
6217 If TMODE is not VOIDmode, it suggests generating the
6218 result in mode TMODE. But this is done only when convenient.
6219 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6220 TMODE is just a suggestion; callers must assume that
6221 the rtx returned may not have mode TMODE.
6223 Note that TARGET may have neither TMODE nor MODE. In that case, it
6224 probably will not be used.
6226 If MODIFIER is EXPAND_SUM then when EXP is an addition
6227 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6228 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6229 products as above, or REG or MEM, or constant.
6230 Ordinarily in such cases we would output mul or add instructions
6231 and then return a pseudo reg containing the sum.
6233 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6234 it also marks a label as absolutely required (it can't be dead).
6235 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6236 This is used for outputting expressions used in initializers.
6238 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6239 with a constant address even if that address is not normally legitimate.
6240 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6242 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6243 a call parameter. Such targets require special care as we haven't yet
6244 marked TARGET so that it's safe from being trashed by libcalls. We
6245 don't want to use TARGET for anything but the final result;
6246 intermediate values must go elsewhere. Additionally, calls to
6247 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6249 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6250 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6251 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6252 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on recursively. */
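/* A minimal usage sketch (added for illustration; not part of the
   original source). A typical caller must use the returned rtx rather
   than assuming TARGET was written:

       rtx val = expand_expr (exp, target, mode, EXPAND_NORMAL);
       if (val != target)
         emit_move_insn (target, val);
*/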
6255 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6256 enum expand_modifier, rtx *);
6259 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6260 enum expand_modifier modifier, rtx *alt_rtl)
6263 rtx ret, last = NULL;
6265 /* Handle ERROR_MARK before anybody tries to access its type. */
6266 if (TREE_CODE (exp) == ERROR_MARK
6267 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6269 ret = CONST0_RTX (tmode);
6270 return ret ? ret : const0_rtx;
6273 if (flag_non_call_exceptions)
6275 rn = lookup_stmt_eh_region (exp);
6276 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6278 last = get_last_insn ();
6281 /* If this is an expression of some kind and it has an associated line
6282 number, then emit the line number before expanding the expression.
6284 We need to save and restore the file and line information so that
6285 errors discovered during expansion are emitted with the right
6286 information. It would be better if the diagnostic routines
6287 used the file/line information embedded in the tree nodes rather than globals. */
6289 if (cfun && EXPR_HAS_LOCATION (exp))
6291 location_t saved_location = input_location;
6292 input_location = EXPR_LOCATION (exp);
6293 emit_line_note (input_location);
6295 /* Record where the insns produced belong. */
6296 record_block_change (TREE_BLOCK (exp));
6298 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6300 input_location = saved_location;
6304 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6307 /* If using non-call exceptions, mark all insns that may trap.
6308 expand_call() will mark CALL_INSNs before we get to this code,
6309 but it doesn't handle libcalls, and these may trap. */
6313 for (insn = next_real_insn (last); insn;
6314 insn = next_real_insn (insn))
6316 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6317 /* If we want exceptions for non-call insns, any
6318 may_trap_p instruction may throw. */
6319 && GET_CODE (PATTERN (insn)) != CLOBBER
6320 && GET_CODE (PATTERN (insn)) != USE
6321 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
6323 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6333 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6334 enum expand_modifier modifier, rtx *alt_rtl)
6337 tree type = TREE_TYPE (exp);
6339 enum machine_mode mode;
6340 enum tree_code code = TREE_CODE (exp);
6342 rtx subtarget, original_target;
6345 bool reduce_bit_field = false;
6346 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6347 ? reduce_to_bit_field_precision ((expr), target, type) : (expr))
6352 mode = TYPE_MODE (type);
6353 unsignedp = TYPE_UNSIGNED (type);
6354 if (lang_hooks.reduce_bit_field_operations
6355 && TREE_CODE (type) == INTEGER_TYPE
6356 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6358 /* An operation in what may be a bit-field type needs the
6359 result to be reduced to the precision of the bit-field type,
6360 which is narrower than that of the type's mode. */
6361 reduce_bit_field = true;
6362 if (modifier == EXPAND_STACK_PARM)
6366 /* Use subtarget as the target for operand 0 of a binary operation. */
6367 subtarget = get_subtarget (target);
6368 original_target = target;
6369 ignore = (target == const0_rtx
6370 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6371 || code == CONVERT_EXPR || code == COND_EXPR
6372 || code == VIEW_CONVERT_EXPR)
6373 && TREE_CODE (type) == VOID_TYPE));
6375 /* If we are going to ignore this result, we need only do something
6376 if there is a side-effect somewhere in the expression. If there
6377 is, short-circuit the most common cases here. Note that we must
6378 not call expand_expr with anything but const0_rtx in case this
6379 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
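/* For example (illustrative): in a statement whose value is discarded,
   such as

       (void) (x + y);

   nothing need be computed unless a subexpression has side effects, so
   the cases below recurse with const0_rtx as the target. */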
6383 if (! TREE_SIDE_EFFECTS (exp))
6386 /* Ensure we reference a volatile object even if the value is ignored, but
6387 don't do this if all we are doing is taking its address. */
6388 if (TREE_THIS_VOLATILE (exp)
6389 && TREE_CODE (exp) != FUNCTION_DECL
6390 && mode != VOIDmode && mode != BLKmode
6391 && modifier != EXPAND_CONST_ADDRESS)
6393 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6395 temp = copy_to_reg (temp);
6399 if (TREE_CODE_CLASS (code) == tcc_unary
6400 || code == COMPONENT_REF || code == INDIRECT_REF)
6401 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6404 else if (TREE_CODE_CLASS (code) == tcc_binary
6405 || TREE_CODE_CLASS (code) == tcc_comparison
6406 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6408 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6409 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6412 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6413 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6414 /* If the second operand has no side effects, just evaluate the first. */
6416 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6418 else if (code == BIT_FIELD_REF)
6420 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6421 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6422 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6429 /* If we will do cse, generate all results into pseudo registers
6430 since 1) that allows cse to find more things
6431 and 2) otherwise cse could produce an insn the machine
6432 cannot support. An exception is a CONSTRUCTOR into a multi-word
6433 MEM: that's much more likely to be most efficient into the MEM.
6434 Another is a CALL_EXPR which must return in memory. */
6436 if (! cse_not_expected && mode != BLKmode && target
6437 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6438 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6439 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6446 tree function = decl_function_context (exp);
6448 temp = label_rtx (exp);
6449 temp = gen_rtx_LABEL_REF (Pmode, temp);
6451 if (function != current_function_decl
6453 LABEL_REF_NONLOCAL_P (temp) = 1;
6455 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6460 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
6465 /* If a static var's type was incomplete when the decl was written,
6466 but the type is complete now, lay out the decl now. */
6467 if (DECL_SIZE (exp) == 0
6468 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6469 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6470 layout_decl (exp, 0);
6472 /* ... fall through ... */
6476 gcc_assert (DECL_RTL (exp));
6478 /* Ensure the variable is marked as used even if it doesn't go through
6479 a parser. If it hasn't been used yet, write out an external definition. */
6481 if (! TREE_USED (exp))
6483 assemble_external (exp);
6484 TREE_USED (exp) = 1;
6487 /* Show we haven't gotten RTL for this yet. */
6490 /* Variables inherited from containing functions should have
6491 been lowered by this point. */
6492 context = decl_function_context (exp);
6493 gcc_assert (!context
6494 || context == current_function_decl
6495 || TREE_STATIC (exp)
6496 /* ??? C++ creates functions that are not TREE_STATIC. */
6497 || TREE_CODE (exp) == FUNCTION_DECL);
6499 /* This is the case of an array whose size is to be determined
6500 from its initializer, while the initializer is still being parsed.
6503 if (MEM_P (DECL_RTL (exp))
6504 && REG_P (XEXP (DECL_RTL (exp), 0)))
6505 temp = validize_mem (DECL_RTL (exp));
6507 /* If DECL_RTL is memory, we are in the normal case and either
6508 the address is not valid or it is not a register and -fforce-addr
6509 is specified, get the address into a register. */
6511 else if (MEM_P (DECL_RTL (exp))
6512 && modifier != EXPAND_CONST_ADDRESS
6513 && modifier != EXPAND_SUM
6514 && modifier != EXPAND_INITIALIZER
6515 && (! memory_address_p (DECL_MODE (exp),
6516 XEXP (DECL_RTL (exp), 0))
6518 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6521 *alt_rtl = DECL_RTL (exp);
6522 temp = replace_equiv_address (DECL_RTL (exp),
6523 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6526 /* If we got something, return it. But first, set the alignment
6527 if the address is a register. */
6530 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6531 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6536 /* If the mode of DECL_RTL does not match that of the decl, it
6537 must be a promoted value. We return a SUBREG of the wanted mode,
6538 but mark it so that we know that it was already extended. */
6540 if (REG_P (DECL_RTL (exp))
6541 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6543 enum machine_mode pmode;
6545 /* Get the signedness used for this variable. Ensure we get the
6546 same mode we got when the variable was declared. */
6547 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
6548 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0));
6549 gcc_assert (GET_MODE (DECL_RTL (exp)) == pmode);
6551 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6552 SUBREG_PROMOTED_VAR_P (temp) = 1;
6553 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6557 return DECL_RTL (exp);
6560 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6561 TREE_INT_CST_HIGH (exp), mode);
6563 /* ??? If overflow is set, fold will have done an incomplete job,
6564 which can result in (plus xx (const_int 0)), which can get
6565 simplified by validate_replace_rtx during virtual register
6566 instantiation, which can result in unrecognizable insns.
6567 Avoid this by forcing all overflows into registers. */
6568 if (TREE_CONSTANT_OVERFLOW (exp)
6569 && modifier != EXPAND_INITIALIZER)
6570 temp = force_reg (mode, temp);
6575 if (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_INT
6576 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp))) == MODE_VECTOR_FLOAT)
6577 return const_vector_from_tree (exp);
6579 return expand_expr (build1 (CONSTRUCTOR, TREE_TYPE (exp),
6580 TREE_VECTOR_CST_ELTS (exp)),
6581 ignore ? const0_rtx : target, tmode, modifier);
6584 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6587 /* If optimized, generate immediate CONST_DOUBLE
6588 which will be turned into memory by reload if necessary.
6590 We used to force a register so that loop.c could see it. But
6591 this does not allow gen_* patterns to perform optimizations with
6592 the constants. It also produces two insns in cases like "x = 1.0;".
6593 On most machines, floating-point constants are not permitted in
6594 many insns, so we'd end up copying it to a register in any case.
6596 Now, we do the copying in expand_binop, if appropriate. */
6597 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6598 TYPE_MODE (TREE_TYPE (exp)));
6601 /* Handle evaluating a complex constant in a CONCAT target. */
6602 if (original_target && GET_CODE (original_target) == CONCAT)
6604 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6607 rtarg = XEXP (original_target, 0);
6608 itarg = XEXP (original_target, 1);
6610 /* Move the real and imaginary parts separately. */
6611 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6612 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6615 emit_move_insn (rtarg, op0);
6617 emit_move_insn (itarg, op1);
6619 return original_target;
6622 /* ... fall through ... */
6625 temp = output_constant_def (exp, 1);
6627 /* temp contains a constant address.
6628 On RISC machines where a constant address isn't valid,
6629 make some insns to get that address into a register. */
6630 if (modifier != EXPAND_CONST_ADDRESS
6631 && modifier != EXPAND_INITIALIZER
6632 && modifier != EXPAND_SUM
6633 && (! memory_address_p (mode, XEXP (temp, 0))
6634 || flag_force_addr))
6635 return replace_equiv_address (temp,
6636 copy_rtx (XEXP (temp, 0)));
6641 tree val = TREE_OPERAND (exp, 0);
6642 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6644 if (!SAVE_EXPR_RESOLVED_P (exp))
6646 /* We can indeed still hit this case, typically via builtin
6647 expanders calling save_expr immediately before expanding
6648 something. Assume this means that we only have to deal
6649 with non-BLKmode values. */
6650 gcc_assert (GET_MODE (ret) != BLKmode);
6652 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6653 DECL_ARTIFICIAL (val) = 1;
6654 DECL_IGNORED_P (val) = 1;
6655 TREE_OPERAND (exp, 0) = val;
6656 SAVE_EXPR_RESOLVED_P (exp) = 1;
6658 if (!CONSTANT_P (ret))
6659 ret = copy_to_reg (ret);
6660 SET_DECL_RTL (val, ret);
6667 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6668 expand_goto (TREE_OPERAND (exp, 0));
6670 expand_computed_goto (TREE_OPERAND (exp, 0));
6674 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6680 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6681 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6686 /* All elts simple constants => refer to a constant in memory. But
6687 if this is a non-BLKmode mode, let it store a field at a time
6688 since that should make a CONST_INT or CONST_DOUBLE when we
6689 fold. Likewise, if we have a target we can use, it is best to
6690 store directly into the target unless the type is large enough
6691 that memcpy will be used. If we are making an initializer and
6692 all operands are constant, put it in memory as well.
6694 FIXME: Avoid trying to fill vector constructors piecemeal.
6695 Output them with output_constant_def below unless we're sure
6696 they're zeros. This should go away when vector initializers
6697 are treated like VECTOR_CST instead of arrays.
6699 else if ((TREE_STATIC (exp)
6700 && ((mode == BLKmode
6701 && ! (target != 0 && safe_from_p (target, exp, 1)))
6702 || TREE_ADDRESSABLE (exp)
6703 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6704 && (! MOVE_BY_PIECES_P
6705 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6707 && ! mostly_zeros_p (exp))))
6708 || ((modifier == EXPAND_INITIALIZER
6709 || modifier == EXPAND_CONST_ADDRESS)
6710 && TREE_CONSTANT (exp)))
6712 rtx constructor = output_constant_def (exp, 1);
6714 if (modifier != EXPAND_CONST_ADDRESS
6715 && modifier != EXPAND_INITIALIZER
6716 && modifier != EXPAND_SUM)
6717 constructor = validize_mem (constructor);
6723 /* Handle calls that pass values in multiple non-contiguous
6724 locations. The Irix 6 ABI has examples of this. */
6725 if (target == 0 || ! safe_from_p (target, exp, 1)
6726 || GET_CODE (target) == PARALLEL
6727 || modifier == EXPAND_STACK_PARM)
6729 = assign_temp (build_qualified_type (type,
6731 | (TREE_READONLY (exp)
6732 * TYPE_QUAL_CONST))),
6733 0, TREE_ADDRESSABLE (exp), 1);
6735 store_constructor (exp, target, 0, int_expr_size (exp));
6739 case MISALIGNED_INDIRECT_REF:
6740 case ALIGN_INDIRECT_REF:
6743 tree exp1 = TREE_OPERAND (exp, 0);
6746 if (code == MISALIGNED_INDIRECT_REF
6747 && !targetm.vectorize.misaligned_mem_ok (mode))
6750 if (modifier != EXPAND_WRITE)
6754 t = fold_read_from_constant_string (exp);
6756 return expand_expr (t, target, tmode, modifier);
6759 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6760 op0 = memory_address (mode, op0);
6762 if (code == ALIGN_INDIRECT_REF)
6764 int align = TYPE_ALIGN_UNIT (type);
6765 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
6766 op0 = memory_address (mode, op0);
6769 temp = gen_rtx_MEM (mode, op0);
6771 orig = REF_ORIGINAL (exp);
6774 set_mem_attributes (temp, orig, 0);
6782 tree array = TREE_OPERAND (exp, 0);
6783 tree index = TREE_OPERAND (exp, 1);
6785 /* Fold an expression like: "foo"[2].
6786 This is not done in fold so it won't happen inside &.
6787 Don't fold if this is for wide characters since it's too
6788 difficult to do correctly and this is a very rare case. */
6790 if (modifier != EXPAND_CONST_ADDRESS
6791 && modifier != EXPAND_INITIALIZER
6792 && modifier != EXPAND_MEMORY)
6794 tree t = fold_read_from_constant_string (exp);
6797 return expand_expr (t, target, tmode, modifier);
6800 /* If this is a constant index into a constant array,
6801 just get the value from the array. Handle both the cases when
6802 we have an explicit constructor and when our operand is a variable
6803 that was declared const. */
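/* Illustrative example (not from the original source): given

       static const int t[3] = { 10, 20, 30 };
       ... t[1] ...

   the reference folds to the constant 20, taken either from the
   CONSTRUCTOR itself or from DECL_INITIAL of the const variable. */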
6805 if (modifier != EXPAND_CONST_ADDRESS
6806 && modifier != EXPAND_INITIALIZER
6807 && modifier != EXPAND_MEMORY
6808 && TREE_CODE (array) == CONSTRUCTOR
6809 && ! TREE_SIDE_EFFECTS (array)
6810 && TREE_CODE (index) == INTEGER_CST)
6814 for (elem = CONSTRUCTOR_ELTS (array);
6815 (elem && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6816 elem = TREE_CHAIN (elem))
6819 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6820 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6824 else if (optimize >= 1
6825 && modifier != EXPAND_CONST_ADDRESS
6826 && modifier != EXPAND_INITIALIZER
6827 && modifier != EXPAND_MEMORY
6828 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6829 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6830 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6831 && targetm.binds_local_p (array))
6833 if (TREE_CODE (index) == INTEGER_CST)
6835 tree init = DECL_INITIAL (array);
6837 if (TREE_CODE (init) == CONSTRUCTOR)
6841 for (elem = CONSTRUCTOR_ELTS (init);
6843 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6844 elem = TREE_CHAIN (elem))
6847 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6848 return expand_expr (fold (TREE_VALUE (elem)), target,
6851 else if (TREE_CODE (init) == STRING_CST
6852 && 0 > compare_tree_int (index,
6853 TREE_STRING_LENGTH (init)))
6855 tree type = TREE_TYPE (TREE_TYPE (init));
6856 enum machine_mode mode = TYPE_MODE (type);
6858 if (GET_MODE_CLASS (mode) == MODE_INT
6859 && GET_MODE_SIZE (mode) == 1)
6860 return gen_int_mode (TREE_STRING_POINTER (init)
6861 [TREE_INT_CST_LOW (index)], mode);
6866 goto normal_inner_ref;
6869 /* If the operand is a CONSTRUCTOR, we can just extract the
6870 appropriate field if it is present. */
6871 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6875 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6876 elt = TREE_CHAIN (elt))
6877 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6878 /* We can normally use the value of the field in the
6879 CONSTRUCTOR. However, if this is a bitfield in
6880 an integral mode that we can fit in a HOST_WIDE_INT,
6881 we must mask only the number of bits in the bitfield,
6882 since this is done implicitly by the constructor. If
6883 the bitfield does not meet either of those conditions,
6884 we can't do this optimization. */
6885 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6886 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6888 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6889 <= HOST_BITS_PER_WIDE_INT))))
6891 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6892 && modifier == EXPAND_STACK_PARM)
6894 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6895 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6897 HOST_WIDE_INT bitsize
6898 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6899 enum machine_mode imode
6900 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6902 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6904 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6905 op0 = expand_and (imode, op0, op1, target);
6910 = build_int_cst (NULL_TREE,
6911 GET_MODE_BITSIZE (imode) - bitsize);
6913 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6915 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6923 goto normal_inner_ref;
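/* Illustrative example of the bit-field extraction above (not from the
   original source): for

       struct { unsigned f : 3; } s = { 5 };
       ... s.f ...

   the CONSTRUCTOR value is masked with (1 << 3) - 1 when the field is
   unsigned, or shifted left and then arithmetically right by
   GET_MODE_BITSIZE (imode) - 3 bits to sign-extend when signed. */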
6926 case ARRAY_RANGE_REF:
6929 enum machine_mode mode1;
6930 HOST_WIDE_INT bitsize, bitpos;
6933 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6934 &mode1, &unsignedp, &volatilep);
6937 /* If we got back the original object, something is wrong. Perhaps
6938 we are evaluating an expression too early. In any event, don't
6939 infinitely recurse. */
6940 gcc_assert (tem != exp);
6942 /* If TEM's type is a union of variable size, pass TARGET to the inner
6943 computation, since it will need a temporary and TARGET is known
6944 to suffice. This occurs in unchecked conversion in Ada. */
6948 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6949 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6951 && modifier != EXPAND_STACK_PARM
6952 ? target : NULL_RTX),
6954 (modifier == EXPAND_INITIALIZER
6955 || modifier == EXPAND_CONST_ADDRESS
6956 || modifier == EXPAND_STACK_PARM)
6957 ? modifier : EXPAND_NORMAL);
6959 /* If this is a constant, put it in a register if it is a
6960 legitimate constant and OFFSET is 0, and in memory if it isn't. */
6961 if (CONSTANT_P (op0))
6963 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6964 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6966 op0 = force_reg (mode, op0);
6968 op0 = validize_mem (force_const_mem (mode, op0));
6971 /* Otherwise, if this object is not in memory and we either have an
6972 offset or a BLKmode result, put it there. This case can't occur in
6973 C, but can in Ada if we have unchecked conversion of an expression
6974 from a scalar type to an array or record type or for an
6975 ARRAY_RANGE_REF whose type is BLKmode. */
6976 else if (!MEM_P (op0)
6978 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
6980 tree nt = build_qualified_type (TREE_TYPE (tem),
6981 (TYPE_QUALS (TREE_TYPE (tem))
6982 | TYPE_QUAL_CONST));
6983 rtx memloc = assign_temp (nt, 1, 1, 1);
6985 emit_move_insn (memloc, op0);
6991 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
6994 gcc_assert (MEM_P (op0));
6996 #ifdef POINTERS_EXTEND_UNSIGNED
6997 if (GET_MODE (offset_rtx) != Pmode)
6998 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7000 if (GET_MODE (offset_rtx) != ptr_mode)
7001 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7004 if (GET_MODE (op0) == BLKmode
7005 /* A constant address in OP0 can have VOIDmode; we must
7006 not try to call force_reg in that case. */
7007 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7009 && (bitpos % bitsize) == 0
7010 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7011 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7013 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7017 op0 = offset_address (op0, offset_rtx,
7018 highest_pow2_factor (offset));
7021 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7022 record its alignment as BIGGEST_ALIGNMENT. */
7023 if (MEM_P (op0) && bitpos == 0 && offset != 0
7024 && is_aligning_offset (offset, tem))
7025 set_mem_align (op0, BIGGEST_ALIGNMENT);
7027 /* Don't forget about volatility even if this is a bitfield. */
7028 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7030 if (op0 == orig_op0)
7031 op0 = copy_rtx (op0);
7033 MEM_VOLATILE_P (op0) = 1;
7036 /* The following code doesn't handle CONCAT.
7037 Assume only bitpos == 0 can be used for CONCAT, due to
7038 one-element arrays having the same mode as their element. */
7039 if (GET_CODE (op0) == CONCAT)
7041 gcc_assert (bitpos == 0
7042 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7046 /* In cases where an aligned union has an unaligned object
7047 as a field, we might be extracting a BLKmode value from
7048 an integer-mode (e.g., SImode) object. Handle this case
7049 by doing the extract into an object as wide as the field
7050 (which we know to be the width of a basic mode), then
7051 storing into memory, and changing the mode to BLKmode. */
7052 if (mode1 == VOIDmode
7053 || REG_P (op0) || GET_CODE (op0) == SUBREG
7054 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7055 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7056 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7057 && modifier != EXPAND_CONST_ADDRESS
7058 && modifier != EXPAND_INITIALIZER)
7059 /* If the field isn't aligned enough to fetch as a memref,
7060 fetch it as a bit field. */
7061 || (mode1 != BLKmode
7062 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7063 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7065 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7066 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7067 && ((modifier == EXPAND_CONST_ADDRESS
7068 || modifier == EXPAND_INITIALIZER)
7070 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7071 || (bitpos % BITS_PER_UNIT != 0)))
7072 /* If the type and the field are a constant size and the
7073 size of the type isn't the same size as the bitfield,
7074 we must use bitfield operations. */
7076 && TYPE_SIZE (TREE_TYPE (exp))
7077 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7078 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7081 enum machine_mode ext_mode = mode;
7083 if (ext_mode == BLKmode
7084 && ! (target != 0 && MEM_P (op0)
7086 && bitpos % BITS_PER_UNIT == 0))
7087 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7089 if (ext_mode == BLKmode)
7092 target = assign_temp (type, 0, 1, 1);
7097 /* In this case, BITPOS must start at a byte boundary and
7098 TARGET, if specified, must be a MEM. */
7099 gcc_assert (MEM_P (op0)
7100 && (!target || MEM_P (target))
7101 && !(bitpos % BITS_PER_UNIT));
7103 emit_block_move (target,
7104 adjust_address (op0, VOIDmode,
7105 bitpos / BITS_PER_UNIT),
7106 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7108 (modifier == EXPAND_STACK_PARM
7109 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7114 op0 = validize_mem (op0);
7116 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7117 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7119 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7120 (modifier == EXPAND_STACK_PARM
7121 ? NULL_RTX : target),
7122 ext_mode, ext_mode);
7124 /* If the result is a record type and BITSIZE is narrower than
7125 the mode of OP0, an integral mode, and this is a big endian
7126 machine, we must put the field into the high-order bits. */
7127 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7128 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7129 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7130 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7131 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7135 /* If the result type is BLKmode, store the data into a temporary
7136 of the appropriate type, but with the mode corresponding to the
7137 mode for the data we have (op0's mode). It's tempting to make
7138 this a constant type, since we know it's only being stored once,
7139 but that can cause problems if we are taking the address of this
7140 COMPONENT_REF because the MEM of any reference via that address
7141 will have flags corresponding to the type, which will not
7142 necessarily be constant. */
7143 if (mode == BLKmode)
7146 = assign_stack_temp_for_type
7147 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7149 emit_move_insn (new, op0);
7150 op0 = copy_rtx (new);
7151 PUT_MODE (op0, BLKmode);
7152 set_mem_attributes (op0, exp, 1);
7158 /* If the result is BLKmode, use that to access the object now as well. */
7160 if (mode == BLKmode)
7163 /* Get a reference to just this component. */
7164 if (modifier == EXPAND_CONST_ADDRESS
7165 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7166 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7168 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7170 if (op0 == orig_op0)
7171 op0 = copy_rtx (op0);
7173 set_mem_attributes (op0, exp, 0);
7174 if (REG_P (XEXP (op0, 0)))
7175 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7177 MEM_VOLATILE_P (op0) |= volatilep;
7178 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7179 || modifier == EXPAND_CONST_ADDRESS
7180 || modifier == EXPAND_INITIALIZER)
7182 else if (target == 0)
7183 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7185 convert_move (target, op0, unsignedp);
7190 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7193 /* Check for a built-in function. */
7194 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7195 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7197 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7199 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7200 == BUILT_IN_FRONTEND)
7201 return lang_hooks.expand_expr (exp, original_target,
7205 return expand_builtin (exp, target, subtarget, tmode, ignore);
7208 return expand_call (exp, target, ignore);
7210 case NON_LVALUE_EXPR:
7213 if (TREE_OPERAND (exp, 0) == error_mark_node)
7216 if (TREE_CODE (type) == UNION_TYPE)
7218 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
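/* Illustrative note (not from the original source): the GNU C
   cast-to-union extension is one way to reach this path, e.g.

       union u { int i; double d; };
       union u v = (union u) 3;

   which stores the int at the beginning of a union-typed object. */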
7220 /* If both input and output are BLKmode, this conversion isn't doing
7221 anything except possibly changing memory attributes. */
7222 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7224 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7227 result = copy_rtx (result);
7228 set_mem_attributes (result, exp, 0);
7234 if (TYPE_MODE (type) != BLKmode)
7235 target = gen_reg_rtx (TYPE_MODE (type));
7237 target = assign_temp (type, 0, 1, 1);
7241 /* Store data into beginning of memory target. */
7242 store_expr (TREE_OPERAND (exp, 0),
7243 adjust_address (target, TYPE_MODE (valtype), 0),
7244 modifier == EXPAND_STACK_PARM);
7248 gcc_assert (REG_P (target));
7250 /* Store this field into a union of the proper type. */
7251 store_field (target,
7252 MIN ((int_size_in_bytes (TREE_TYPE
7253 (TREE_OPERAND (exp, 0)))
7255 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7256 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7260 /* Return the entire union. */
7264 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7266 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7269 /* If the signedness of the conversion differs and OP0 is
7270 a promoted SUBREG, clear that indication since we now
7271 have to do the proper extension. */
7272 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7273 && GET_CODE (op0) == SUBREG)
7274 SUBREG_PROMOTED_VAR_P (op0) = 0;
7276 return REDUCE_BIT_FIELD (op0);
7279 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7280 if (GET_MODE (op0) == mode)
7283 /* If OP0 is a constant, just convert it into the proper mode. */
7284 else if (CONSTANT_P (op0))
7286 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7287 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7289 if (modifier == EXPAND_INITIALIZER)
7290 op0 = simplify_gen_subreg (mode, op0, inner_mode,
7291 subreg_lowpart_offset (mode,
7294 op0 = convert_modes (mode, inner_mode, op0,
7295 TYPE_UNSIGNED (inner_type));
7298 else if (modifier == EXPAND_INITIALIZER)
7299 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7301 else if (target == 0)
7302 op0 = convert_to_mode (mode, op0,
7303 TYPE_UNSIGNED (TREE_TYPE
7304 (TREE_OPERAND (exp, 0))));
7307 convert_move (target, op0,
7308 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7312 return REDUCE_BIT_FIELD (op0);
7314 case VIEW_CONVERT_EXPR:
7315 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7317 /* If the input and output modes are both the same, we are done.
7318 Otherwise, if neither mode is BLKmode and both are integral and within
7319 a word, we can use gen_lowpart. If neither is true, make sure the
7320 operand is in memory and convert the MEM to the new mode. */
7321 if (TYPE_MODE (type) == GET_MODE (op0))
7323 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7324 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7325 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7326 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7327 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7328 op0 = gen_lowpart (TYPE_MODE (type), op0);
7329 else if (!MEM_P (op0))
7331 /* If the operand is not a MEM, force it into memory. Since we
7332 are going to be changing the mode of the MEM, don't call
7333 force_const_mem for constants because we don't allow pool
7334 constants to change mode. */
7335 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7337 gcc_assert (!TREE_ADDRESSABLE (exp));
7339 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7341 = assign_stack_temp_for_type
7342 (TYPE_MODE (inner_type),
7343 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7345 emit_move_insn (target, op0);
7349 /* At this point, OP0 is in the correct mode. If the output type is such
7350 that the operand is known to be aligned, indicate that it is.
7351 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7355 op0 = copy_rtx (op0);
7357 if (TYPE_ALIGN_OK (type))
7358 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7359 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7360 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7362 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7363 HOST_WIDE_INT temp_size
7364 = MAX (int_size_in_bytes (inner_type),
7365 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7366 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7367 temp_size, 0, type);
7368 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7370 gcc_assert (!TREE_ADDRESSABLE (exp));
7372 if (GET_MODE (op0) == BLKmode)
7373 emit_block_move (new_with_op0_mode, op0,
7374 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7375 (modifier == EXPAND_STACK_PARM
7376 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7378 emit_move_insn (new_with_op0_mode, op0);
7383 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7389 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7390 something else, make sure we add the register to the constant and
7391 then to the other thing. This case can occur during strength
7392 reduction and doing it this way will produce better code if the
7393 frame pointer or argument pointer is eliminated.
7395 fold-const.c will ensure that the constant is always in the inner
7396 PLUS_EXPR, so the only case we need to do anything about is if
7397 sp, ap, or fp is our second argument, in which case we must swap
7398 the innermost first argument and our second argument. */
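/* Worked example (illustrative, not from the original source): if EXP
   is (A + C) + FP, where C is constant and FP's DECL_RTL is the frame
   pointer, the swap below rewrites it as (FP + C) + A, so the register
   is combined with the constant first. */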
7400 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7401 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7402 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7403 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7404 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7405 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7407 tree t = TREE_OPERAND (exp, 1);
7409 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7410 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7413 /* If the result is to be ptr_mode and we are adding an integer to
7414 something, we might be forming a constant. So try to use
7415 plus_constant. If it produces a sum and we can't accept it,
7416 use force_operand. This allows P = &ARR[const] to generate
7417 efficient code on machines where a SYMBOL_REF is not a valid address.
7420 If this is an EXPAND_SUM call, always return the sum. */
7421 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7422 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7424 if (modifier == EXPAND_STACK_PARM)
7426 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7427 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7428 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7432 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7434 /* Use immed_double_const to ensure that the constant is
7435 truncated according to the mode of OP1, then sign extended
7436 to a HOST_WIDE_INT. Using the constant directly can result
7437 in non-canonical RTL in a 64x32 cross compile. */
7439 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7441 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7442 op1 = plus_constant (op1, INTVAL (constant_part));
7443 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7444 op1 = force_operand (op1, target);
7445 return REDUCE_BIT_FIELD (op1);
7448 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7449 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7450 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7454 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7455 (modifier == EXPAND_INITIALIZER
7456 ? EXPAND_INITIALIZER : EXPAND_SUM));
7457 if (! CONSTANT_P (op0))
7459 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7460 VOIDmode, modifier);
7461 /* Return a PLUS if modifier says it's OK. */
7462 if (modifier == EXPAND_SUM
7463 || modifier == EXPAND_INITIALIZER)
7464 return simplify_gen_binary (PLUS, mode, op0, op1);
7467 /* Use immed_double_const to ensure that the constant is
7468 truncated according to the mode of OP1, then sign extended
7469 to a HOST_WIDE_INT. Using the constant directly can result
7470 in non-canonical RTL in a 64x32 cross compile. */
7472 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7474 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7475 op0 = plus_constant (op0, INTVAL (constant_part));
7476 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7477 op0 = force_operand (op0, target);
7478 return REDUCE_BIT_FIELD (op0);
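/* Worked example of the truncation issue above (illustrative, not from
   the original source): on a 64-bit host targeting a 32-bit machine,
   the SImode constant 0xffffffff must become the canonical CONST_INT
   -1; using the low word directly would yield non-canonical RTL, which
   immed_double_const avoids. */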
7482 /* No sense saving up arithmetic to be done
7483 if it's all in the wrong mode to form part of an address.
7484 And force_operand won't know whether to sign-extend or zero-extend. */
7486 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7487 || mode != ptr_mode)
7489 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7490 subtarget, &op0, &op1, 0);
7491 if (op0 == const0_rtx)
7493 if (op1 == const0_rtx)
7498 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7499 subtarget, &op0, &op1, modifier);
7500 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7503 /* For initializers, we are allowed to return a MINUS of two
7504 symbolic constants. Here we handle all cases when both operands are constant. */
7506 /* Handle difference of two symbolic constants,
7507 for the sake of an initializer. */
7508 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7509 && really_constant_p (TREE_OPERAND (exp, 0))
7510 && really_constant_p (TREE_OPERAND (exp, 1)))
7512 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7513 NULL_RTX, &op0, &op1, modifier);
7515 /* If the last operand is a CONST_INT, use plus_constant of
7516 the negated constant. Else make the MINUS. */
7517 if (GET_CODE (op1) == CONST_INT)
7518 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7520 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7523 /* No sense saving up arithmetic to be done
7524 if it's all in the wrong mode to form part of an address.
7525 And force_operand won't know whether to sign-extend or zero-extend. */
7527 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7528 || mode != ptr_mode)
7531 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7532 subtarget, &op0, &op1, modifier);
7534 /* Convert A - const to A + (-const). */
7535 if (GET_CODE (op1) == CONST_INT)
7537 op1 = negate_rtx (mode, op1);
7538 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7544 /* If first operand is constant, swap them.
7545 Thus the following special case checks need only
7546 check the second operand. */
7547 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7549 tree t1 = TREE_OPERAND (exp, 0);
7550 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7551 TREE_OPERAND (exp, 1) = t1;
7554 /* Attempt to return something suitable for generating an
7555 indexed address, for machines that support that. */
7557 if (modifier == EXPAND_SUM && mode == ptr_mode
7558 && host_integerp (TREE_OPERAND (exp, 1), 0))
7560 tree exp1 = TREE_OPERAND (exp, 1);
7562 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7566 op0 = force_operand (op0, NULL_RTX);
7568 op0 = copy_to_mode_reg (mode, op0);
7570 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7571 gen_int_mode (tree_low_cst (exp1, 0),
7572 TYPE_MODE (TREE_TYPE (exp1)))));
7575 if (modifier == EXPAND_STACK_PARM)
7578 /* Check for multiplying things that have been extended
7579 from a narrower type. If this machine supports multiplying
7580 in that narrower type with a result in the desired type,
7581 do it that way, and avoid the explicit type-conversion. */
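/* Illustrative example (not from the original source): with

       short a, b;
       int p = (int) a * (int) b;

   a target that provides a widening multiply pattern (mulhisi3, say)
   can form the SImode product directly from the HImode inputs, with no
   explicit extensions. */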
7582 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7583 && TREE_CODE (type) == INTEGER_TYPE
7584 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7585 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7586 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7587 && int_fits_type_p (TREE_OPERAND (exp, 1),
7588 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7589 /* Don't use a widening multiply if a shift will do. */
7590 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7591 > HOST_BITS_PER_WIDE_INT)
7592 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7594 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7595 && (TYPE_PRECISION (TREE_TYPE
7596 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7597 == TYPE_PRECISION (TREE_TYPE
7599 (TREE_OPERAND (exp, 0), 0))))
7600 /* If both operands are extended, they must either both
7601 be zero-extended or both be sign-extended. */
7602 && (TYPE_UNSIGNED (TREE_TYPE
7603 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7604 == TYPE_UNSIGNED (TREE_TYPE
7606 (TREE_OPERAND (exp, 0), 0)))))))
7608 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7609 enum machine_mode innermode = TYPE_MODE (op0type);
7610 bool zextend_p = TYPE_UNSIGNED (op0type);
7611 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7612 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7614 if (mode == GET_MODE_WIDER_MODE (innermode))
7616 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7618 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7619 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7620 TREE_OPERAND (exp, 1),
7621 NULL_RTX, &op0, &op1, 0);
7623 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7624 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7625 NULL_RTX, &op0, &op1, 0);
7628 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7629 && innermode == word_mode)
7632 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7633 NULL_RTX, VOIDmode, 0);
7634 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7635 op1 = convert_modes (innermode, mode,
7636 expand_expr (TREE_OPERAND (exp, 1),
7637 NULL_RTX, VOIDmode, 0),
7640 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7641 NULL_RTX, VOIDmode, 0);
7642 temp = expand_binop (mode, other_optab, op0, op1, target,
7643 unsignedp, OPTAB_LIB_WIDEN);
7644 hipart = gen_highpart (innermode, temp);
7645 htem = expand_mult_highpart_adjust (innermode, hipart,
7649 emit_move_insn (hipart, htem);
7650 return REDUCE_BIT_FIELD (temp);
7654 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7655 subtarget, &op0, &op1, 0);
7656 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7658 case TRUNC_DIV_EXPR:
7659 case FLOOR_DIV_EXPR:
7661 case ROUND_DIV_EXPR:
7662 case EXACT_DIV_EXPR:
7663 if (modifier == EXPAND_STACK_PARM)
7665 /* Possible optimization: compute the dividend with EXPAND_SUM;
7666 then, if the divisor is constant, we can optimize the case
7667 where some terms of the dividend have coefficients divisible by it. */
7668 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7669 subtarget, &op0, &op1, 0);
7670 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7673 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal, saving
7674 an expensive divide. If not, combine will rebuild the original arithmetic. */
7676 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7677 && TREE_CODE (type) == REAL_TYPE
7678 && !real_onep (TREE_OPERAND (exp, 0)))
7679 return expand_expr (build2 (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7680 build2 (RDIV_EXPR, type,
7681 build_real (type, dconst1),
7682 TREE_OPERAND (exp, 1))),
7683 target, tmode, modifier);
7687 case TRUNC_MOD_EXPR:
7688 case FLOOR_MOD_EXPR:
7690 case ROUND_MOD_EXPR:
7691 if (modifier == EXPAND_STACK_PARM)
7693 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7694 subtarget, &op0, &op1, 0);
7695 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7697 case FIX_ROUND_EXPR:
7698 case FIX_FLOOR_EXPR:
7700 gcc_unreachable (); /* Not used for C. */
7702 case FIX_TRUNC_EXPR:
7703 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7704 if (target == 0 || modifier == EXPAND_STACK_PARM)
7705 target = gen_reg_rtx (mode);
7706 expand_fix (target, op0, unsignedp);
7710 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7711 if (target == 0 || modifier == EXPAND_STACK_PARM)
7712 target = gen_reg_rtx (mode);
7713 /* expand_float can't figure out what to do if FROM has VOIDmode.
7714 So give it the correct mode. With -O, cse will optimize this. */
7715 if (GET_MODE (op0) == VOIDmode)
7716 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7718 expand_float (target, op0,
7719 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7723 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7724 if (modifier == EXPAND_STACK_PARM)
7726 temp = expand_unop (mode,
7727 optab_for_tree_code (NEGATE_EXPR, type),
7730 return REDUCE_BIT_FIELD (temp);
7733 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7734 if (modifier == EXPAND_STACK_PARM)
7737 /* ABS_EXPR is not valid for complex arguments. */
7738 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7739 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
7741 /* Unsigned abs is simply the operand. Testing here means we don't
7742 risk generating incorrect code below. */
7743 if (TYPE_UNSIGNED (type))
7746 return expand_abs (mode, op0, target, unsignedp,
7747 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7751 target = original_target;
7753 || modifier == EXPAND_STACK_PARM
7754 || (MEM_P (target) && MEM_VOLATILE_P (target))
7755 || GET_MODE (target) != mode
7757 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7758 target = gen_reg_rtx (mode);
7759 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7760 target, &op0, &op1, 0);
7762 /* First try to do it with a special MIN or MAX instruction.
7763 If that does not win, use a conditional jump to select the proper value. */
7765 this_optab = optab_for_tree_code (code, type);
7766 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7771 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7775 target = gen_reg_rtx (mode);
7777 /* If op1 was placed in target, swap op0 and op1. */
7778 if (target != op0 && target == op1)
7786 emit_move_insn (target, op0);
7788 op0 = gen_label_rtx ();
7790 /* If this mode is an integer too wide to compare properly,
7791 compare word by word. Rely on cse to optimize constant cases. */
7792 if (GET_MODE_CLASS (mode) == MODE_INT
7793 && ! can_compare_p (GE, mode, ccp_jump))
7795 if (code == MAX_EXPR)
7796 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7799 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7804 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7805 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7807 emit_move_insn (target, op1);
7812 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7813 if (modifier == EXPAND_STACK_PARM)
7815 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7819 /* ??? Can optimize bitwise operations with one arg constant.
7820 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7821 and (a bitwise1 b) bitwise2 b (etc)
7822 but that is probably not worthwhile. */
7824 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7825 boolean values when we want in all cases to compute both of them. In
7826 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7827 as actual zero-or-1 values and then bitwise anding. In cases where
7828 there cannot be any side effects, better code would be made by
7829 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7830 how to recognize those cases. */
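/* Illustrative contrast (not from the original source):

       c = (a > 0) && (b > 0);   TRUTH_ANDIF_EXPR: may skip the second test
       c = (a > 0) & (b > 0);    handled here: both computed, then ANDed

   The cases below simply fall through to the bitwise operators. */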
7832 case TRUTH_AND_EXPR:
7833 code = BIT_AND_EXPR;
7838 code = BIT_IOR_EXPR;
7842 case TRUTH_XOR_EXPR:
7843 code = BIT_XOR_EXPR;
7851 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7853 if (modifier == EXPAND_STACK_PARM)
7855 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7856 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7859 /* Could determine the answer when only additive constants differ. Also,
7860 the addition of one can be handled by changing the condition. */
7867 case UNORDERED_EXPR:
7875 temp = do_store_flag (exp,
7876 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
7877 tmode != VOIDmode ? tmode : mode, 0);
7881 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7882 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7884 && REG_P (original_target)
7885 && (GET_MODE (original_target)
7886 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7888 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7891 /* If temp is constant, we can just compute the result. */
7892 if (GET_CODE (temp) == CONST_INT)
7894 if (INTVAL (temp) != 0)
7895 emit_move_insn (target, const1_rtx);
7897 emit_move_insn (target, const0_rtx);
7902 if (temp != original_target)
7904 enum machine_mode mode1 = GET_MODE (temp);
7905 if (mode1 == VOIDmode)
7906 mode1 = tmode != VOIDmode ? tmode : mode;
7908 temp = copy_to_mode_reg (mode1, temp);
7911 op1 = gen_label_rtx ();
7912 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7913 GET_MODE (temp), unsignedp, op1);
7914 emit_move_insn (temp, const1_rtx);
7919 /* If no set-flag instruction, must generate a conditional store
7920 into a temporary variable. Drop through and handle this like && and ||. */
7925 || modifier == EXPAND_STACK_PARM
7926 || ! safe_from_p (target, exp, 1)
7927 /* Make sure we don't have a hard reg (such as function's return
7928 value) live across basic blocks, if not optimizing. */
7929 || (!optimize && REG_P (target)
7930 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7931 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7934 emit_move_insn (target, const0_rtx);
7936 op1 = gen_label_rtx ();
7937 jumpifnot (exp, op1);
7940 emit_move_insn (target, const1_rtx);
7943 return ignore ? const0_rtx : target;
7945 case TRUTH_NOT_EXPR:
7946 if (modifier == EXPAND_STACK_PARM)
7948 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7949 /* The parser is careful to generate TRUTH_NOT_EXPR
7950 only with operands that are always zero or one. */
7951 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7952 target, 1, OPTAB_LIB_WIDEN);
7956 case STATEMENT_LIST:
7958 tree_stmt_iterator iter;
7960 gcc_assert (ignore);
7962 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
7963 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
7968 /* If it's void, we don't need to worry about computing a value. */
7969 if (VOID_TYPE_P (TREE_TYPE (exp)))
7971 tree pred = TREE_OPERAND (exp, 0);
7972 tree then_ = TREE_OPERAND (exp, 1);
7973 tree else_ = TREE_OPERAND (exp, 2);
7975 gcc_assert (TREE_CODE (then_) == GOTO_EXPR
7976 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL
7977 && TREE_CODE (else_) == GOTO_EXPR
7978 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL);
7980 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
7981 return expand_expr (else_, const0_rtx, VOIDmode, 0);
7984 /* Note that COND_EXPRs whose type is a structure or union
7985 are required to be constructed to contain assignments of
7986 a temporary variable, so that we can evaluate them here
7987 for side effect only. If type is void, we must do likewise. */
7989 gcc_assert (!TREE_ADDRESSABLE (type)
7991 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
7992 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
7994 /* If we are not to produce a result, we have no target. Otherwise,
7995 if a target was specified use it; it will not be used as an
7996 intermediate target unless it is safe. If no target, use a temporary. */
7999 if (modifier != EXPAND_STACK_PARM
8001 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8002 && GET_MODE (original_target) == mode
8003 #ifdef HAVE_conditional_move
8004 && (! can_conditionally_move_p (mode)
8005 || REG_P (original_target))
8007 && !MEM_P (original_target))
8008 temp = original_target;
8010 temp = assign_temp (type, 0, 0, 1);
8012 do_pending_stack_adjust ();
8014 op0 = gen_label_rtx ();
8015 op1 = gen_label_rtx ();
8016 jumpifnot (TREE_OPERAND (exp, 0), op0);
8017 store_expr (TREE_OPERAND (exp, 1), temp,
8018 modifier == EXPAND_STACK_PARM);
8020 emit_jump_insn (gen_jump (op1));
8023 store_expr (TREE_OPERAND (exp, 2), temp,
8024 modifier == EXPAND_STACK_PARM);
8031 target = expand_vec_cond_expr (exp, target);
8036 tree lhs = TREE_OPERAND (exp, 0);
8037 tree rhs = TREE_OPERAND (exp, 1);
8039 gcc_assert (ignore);
8041 /* Check for |= or &= of a bitfield of size one into another bitfield
8042 of size 1. In this case, (unless we need the result of the
8043 assignment) we can do this more efficiently with a
8044 test followed by an assignment, if necessary.
8046 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8047 things change so we do, this code should be enhanced to support it. */
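/* Illustrative example (not from the original source): for

       struct { unsigned a : 1, b : 1; } s;
       s.a |= s.b;

   the code below emits the equivalent of "if (s.b) s.a = 1;", avoiding
   a read-modify-write of the destination bit-field. */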
8049 if (TREE_CODE (lhs) == COMPONENT_REF
8050 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8051 || TREE_CODE (rhs) == BIT_AND_EXPR)
8052 && TREE_OPERAND (rhs, 0) == lhs
8053 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8054 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8055 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8057 rtx label = gen_label_rtx ();
8059 do_jump (TREE_OPERAND (rhs, 1),
8060 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8061 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8062 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8063 (TREE_CODE (rhs) == BIT_IOR_EXPR
8065 : integer_zero_node)));
8066 do_pending_stack_adjust ();
8071 expand_assignment (lhs, rhs);
8077 if (!TREE_OPERAND (exp, 0))
8078 expand_null_return ();
8080 expand_return (TREE_OPERAND (exp, 0));
8084 return expand_expr_addr_expr (exp, target, tmode, modifier);
8086 /* COMPLEX type for Extended Pascal & Fortran */
8089 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8092 /* Get the rtx code of the operands. */
8093 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8094 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8097 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8101 /* Move the real (op0) and imaginary (op1) parts to their location. */
8102 emit_move_insn (gen_realpart (mode, target), op0);
8103 emit_move_insn (gen_imagpart (mode, target), op1);
8105 insns = get_insns ();
8108 /* Complex construction should appear as a single unit. */
8109 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8110 each with a separate pseudo as destination.
8111 It's not correct for flow to treat them as a unit. */
8112 if (GET_CODE (target) != CONCAT)
8113 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8121 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8122 return gen_realpart (mode, op0);
8125 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8126 return gen_imagpart (mode, op0);
8129 expand_resx_expr (exp);
8132 case TRY_CATCH_EXPR:
8134 case EH_FILTER_EXPR:
8135 case TRY_FINALLY_EXPR:
8136 /* Lowered by tree-eh.c. */
8139 case WITH_CLEANUP_EXPR:
8140 case CLEANUP_POINT_EXPR:
8142 case CASE_LABEL_EXPR:
8148 case PREINCREMENT_EXPR:
8149 case PREDECREMENT_EXPR:
8150 case POSTINCREMENT_EXPR:
8151 case POSTDECREMENT_EXPR:
8154 case TRUTH_ANDIF_EXPR:
8155 case TRUTH_ORIF_EXPR:
8156 /* Lowered by gimplify.c. */
8160 return get_exception_pointer (cfun);
8163 return get_exception_filter (cfun);
8166 /* Function descriptors are not valid except as
8167 initialization constants, and should not be expanded. */
8175 expand_label (TREE_OPERAND (exp, 0));
8179 expand_asm_expr (exp);
8182 case WITH_SIZE_EXPR:
8183 /* WITH_SIZE_EXPR expands to its first argument. The caller should
8184 have pulled out the size to use in whatever context it needed. */
8185 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
    case REALIGN_LOAD_EXPR:
      {
	tree oprnd0 = TREE_OPERAND (exp, 0);
	tree oprnd1 = TREE_OPERAND (exp, 1);
	tree oprnd2 = TREE_OPERAND (exp, 2);
	rtx op2;

	this_optab = optab_for_tree_code (code, type);
	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
	op2 = expand_expr (oprnd2, NULL_RTX, VOIDmode, 0);
	temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
				  target, unsignedp);
	gcc_assert (temp);
	return temp;
      }

    default:
      return lang_hooks.expand_expr (exp, original_target, tmode,
				     modifier, alt_rtl);
    }
  /* Here to do an ordinary binary operator.  */
 binop:
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, 0);
 binop2:
  this_optab = optab_for_tree_code (code, type);
 binop3:
  if (modifier == EXPAND_STACK_PARM)
    target = 0;
  temp = expand_binop (mode, this_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
  gcc_assert (temp);
  return REDUCE_BIT_FIELD (temp);
}
#undef REDUCE_BIT_FIELD
/* Subroutine of above: reduce EXP to the precision of TYPE (in the
   signedness of TYPE), possibly returning the result in TARGET.  */
static rtx
reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
{
  HOST_WIDE_INT prec = TYPE_PRECISION (type);
  if (target && GET_MODE (target) != GET_MODE (exp))
    target = 0;
  if (TYPE_UNSIGNED (type))
    {
      rtx mask;
      if (prec < HOST_BITS_PER_WIDE_INT)
	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
				   GET_MODE (exp));
      else
	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
				   ((unsigned HOST_WIDE_INT) 1
				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
				   GET_MODE (exp));
      return expand_and (GET_MODE (exp), exp, mask, target);
    }
  else
    {
      tree count = build_int_cst (NULL_TREE,
				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
      exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
      return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
    }
}
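/* For illustration, assuming a 3-bit bit-field type and a 32-bit
   SImode value:

	unsigned:	exp & 0x7		(mask = (1 << 3) - 1)
	signed:		(exp << 29) >> 29	(arithmetic right shift)

   The shift pair replicates the sign bit of the 3-bit field across
   the upper bits, which is exactly the reduction the two
   expand_shift calls above perform.  */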
/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
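/* For illustration: the tree shape accepted above is roughly

	offset = (- (sometype) &exp) & C

   where C + 1 is a power of two and C exceeds
   BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such an offset to the
   address of EXP rounds it up to a (C + 1)-byte boundary, which is
   why the resulting address is known to be over-aligned.  */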
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  tree array, offset;

  if (TREE_CODE (arg) == ADDR_EXPR)
    {
      if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
	{
	  *ptr_offset = size_zero_node;
	  return TREE_OPERAND (arg, 0);
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
	{
	  array = TREE_OPERAND (arg, 0);
	  offset = size_zero_node;
	}
      else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
	{
	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
	  if (TREE_CODE (array) != STRING_CST
	      && TREE_CODE (array) != VAR_DECL)
	    return 0;
	}
      else
	return 0;
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg0, 0);
	  offset = arg1;
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
	{
	  array = TREE_OPERAND (arg1, 0);
	  offset = arg0;
	}
      else
	return 0;
    }
  else
    return 0;

  if (TREE_CODE (array) == STRING_CST)
    {
      *ptr_offset = convert (sizetype, offset);
      return array;
    }
  else if (TREE_CODE (array) == VAR_DECL)
    {
      int length;

      /* Variables initialized to string literals can be handled too.  */
      if (DECL_INITIAL (array) == NULL_TREE
	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
	return 0;

      /* The variable must be read-only, non-volatile, and bind locally.  */
      if (! TREE_READONLY (array)
	  || TREE_SIDE_EFFECTS (array)
	  || ! targetm.binds_local_p (array))
	return 0;

      /* Avoid const char foo[4] = "abcde";  */
      if (DECL_SIZE_UNIT (array) == NULL_TREE
	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
	return 0;

      /* If the variable is bigger than the string literal, OFFSET must be
	 constant and within the bounds of the string literal.  */
      offset = convert (sizetype, offset);
      if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
	  && (! host_integerp (offset, 1)
	      || compare_tree_int (offset, length) >= 0))
	return 0;

      *ptr_offset = offset;
      return DECL_INITIAL (array);
    }

  return 0;
}
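/* For illustration: given

	static const char greeting[8] = "hello";

   string_constant applied to the tree for "greeting + 1" returns the
   STRING_CST "hello" with *PTR_OFFSET set to 1, so callers (e.g. the
   builtin string expanders) can fold accesses into the literal.  */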
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif
  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;
    case LTGT_EXPR:
      code = LTGT;
      break;

    default:
      gcc_unreachable ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
    }
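  /* For illustration: with the rewrite above, "(x & 8) != 0" becomes
     roughly "(x >> 3) & 1", and "(x & 8) == 0" becomes
     "((x >> 3) & 1) ^ 1"; neither form needs a store-flag (scc)
     instruction.  */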
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  result = emit_store_flag (target, code, op0, op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (!REG_P (target)
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  gcc_assert (bcc_gen_fctn[(int) code]);

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
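/* For illustration: the set/jump/set fallback emitted above has the
   shape

	target = 1;			(or 0, when inverted)
	if (op0 <code> op1) goto label;
	target = 0;			(or 1, when inverted)
      label:

   which computes the flag value without a dedicated scc
   instruction.  */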
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}
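/* For illustration: switch expansion consults case_values_threshold
   when deciding between a branch tree and a dispatch table; with a
   casesi pattern available, roughly 4 case labels suffice to consider
   a table, versus 5 when the bounds comparison must be emitted
   separately.  */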
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build2 (MINUS_EXPR, index_type,
			   index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }

  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_const_mem (CASE_VECTOR_MODE, index);
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
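/* For illustration: the RTL built above dereferences, in effect,

	table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE)

   (first passed through PIC_CASE_VECTOR_ADDRESS when generating PIC
   code), loads the selected vector entry into a register, and jumps
   through it.  */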
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build2 (MINUS_EXPR, index_type,
			     convert (index_type, index_expr),
			     convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
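/* For illustration: for a dense switch such as

	switch (c) { case 0: ... case 7: ...; default: ...; }

   the caller passes MINVAL = 0 and RANGE = 7; the fold above rebases
   the index into 0..RANGE, and do_tablejump then emits the single
   unsigned bounds check and the indirect jump through the dispatch
   table.  */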
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (targetm.vector_mode_supported_p (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but do have V2DI, but that case is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return targetm.scalar_mode_supported_p (innermode);
}
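/* For illustration: on a target with no vector unit at all,
   vector_mode_valid_p (V2SImode) still returns 1 as long as
   targetm.scalar_mode_supported_p (SImode) holds, since a V2SI value
   can be emulated with a pair of SImode operations.  */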
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_CONST_VECTOR (mode, v);
}
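/* For illustration: for a V4SI VECTOR_CST such as {1, 2} with only two
   elements in its tree chain, the loop above fills the first two rtvec
   slots from the chain and the trailing loop pads the rest with
   CONST0_RTX (SImode), so the CONST_VECTOR always carries exactly
   GET_MODE_NUNITS elements.  */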
#include "gt-expr.h"