/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
#include "target.h"
#include "timevar.h"
#include "df.h"
#include "diagnostic.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, alias_set_type);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, tree, alias_set_type, bool);

static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);

static int is_aligning_offset (const_tree, const_tree);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
static void write_complex_part (rtx, rtx, bool);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero.  */
#ifndef SET_BY_PIECES_P
#define SET_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) SET_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
   < (unsigned int) MOVE_RATIO)
#endif
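
/* Illustration (hypothetical target numbers, not from any real port):
   with MOVE_MAX_PIECES == 8 and MOVE_RATIO == 4, a 16-byte copy with
   64-bit alignment costs two DImode moves, so MOVE_BY_PIECES_P (16, 64)
   is true and callers expand the copy inline instead of emitting a
   memcpy call:

     if (MOVE_BY_PIECES_P (nbytes, align))
       move_by_pieces (dst, src, nbytes, align, 0);

   The CLEAR/SET/STORE variants make the same insn-count-versus-ratio
   comparison for clearing, memset and constant-string stores.  */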
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block sets.  */
enum insn_code setmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of three different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Synchronization primitives.  */
enum insn_code sync_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
enum insn_code sync_compare_and_swap_cc[NUM_MACHINE_MODES];
enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
enum insn_code sync_lock_release[NUM_MACHINE_MODES];
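
/* Informally: these optabs back the __sync_* built-ins.  For example,
   __sync_fetch_and_add expands through sync_old_add_optab (the insn
   returns the pre-add value), __sync_add_and_fetch through
   sync_new_add_optab, and __sync_val_compare_and_swap through
   sync_compare_and_swap, indexed by the access mode.  */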
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run to set up which modes can be used
   directly in memory and to initialize the block move optab.  It is run
   at the beginning of compilation and when the target is reinitialized.  */

void
init_expr_target (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_REGNO (reg, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
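
/* The tables built above are consulted when deciding how to touch a
   value of a given mode: e.g. convert_move tests
   direct_load[(int) to_mode] before referring to a MEM in a narrower
   mode, and falls back to force_reg when no direct memory access in
   that mode would be recognized.  */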
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  memset (&crtl->expr, 0, sizeof (crtl->expr));
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating, or both may be
   fixed-point.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = SCALAR_FLOAT_MODE_P (to_mode);
  int from_real = SCALAR_FLOAT_MODE_P (from_mode);
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  gcc_assert (to_real == from_real);
  gcc_assert (to_mode != BLKmode);
  gcc_assert (from_mode != BLKmode);
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      gcc_assert ((GET_MODE_PRECISION (from_mode)
		   != GET_MODE_PRECISION (to_mode))
		  || (DECIMAL_FLOAT_MODE_P (from_mode)
		      != DECIMAL_FLOAT_MODE_P (to_mode)));

      if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
	/* Conversion between decimal float and binary float, same size.  */
	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
      else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = convert_optab_libfunc (tab, to_mode, from_mode);

      /* Is this conversion implemented yet?  */
      gcc_assert (libcall);

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
		  != CODE_FOR_nothing);

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      rtx new_from;
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
		  != CODE_FOR_nothing);

      if (to_mode == full_mode)
	{
	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
			  to, from, UNKNOWN);
	  return;
	}

      new_from = gen_reg_rtx (full_mode);
      emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
		      new_from, from, UNKNOWN);

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
      from = new_from;
    }
  /* Make sure both are fixed-point modes or both are not.  */
  gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
	      ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
  if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
    {
      /* If we widen from_mode to to_mode and they are in the same class,
	 we won't saturate the result.
	 Otherwise, always saturate the result to play safe.  */
      if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
	expand_fixed_convert (to, from, 0, 0);
      else
	expand_fixed_convert (to, from, 0, 1);
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  rtx word_to = gen_reg_rtx (word_mode);
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_clobber (to);
	    }
	  convert_move (word_to, from, unsignedp);
	  emit_unop_insn (code, to, word_to, equiv_code);
	  return;
	}
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  gcc_assert (subword);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_insn (insns);
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_cst (NULL_TREE,
					GET_MODE_BITSIZE (to_mode)
					- GET_MODE_BITSIZE (from_mode));
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  gcc_unreachable ();
}
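
/* Usage sketch (illustrative, not from this file): widen a SImode
   pseudo into a DImode pseudo.

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);

   UNSIGNEDP == 0 requests sign extension (SIGN_EXTEND); a nonzero
   value would request ZERO_EXTEND instead.  */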
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }
  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
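
/* Usage sketch (illustrative): unlike convert_move, convert_modes
   returns an rtx rather than storing through one, so constants can be
   folded without emitting any insns.

     rtx wide = convert_modes (DImode, SImode, some_si_value, 1);

   For a CONST_INT argument this goes through gen_int_mode/gen_lowpart
   and generates no code at all.  */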
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;
  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align, max_size) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }
  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }
  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  gcc_assert (!data.len);

  if (endp)
    {
      rtx to1;

      gcc_assert (!data.reverse);
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
		       unsigned int max_size)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  enum machine_mode tmode;

  tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
  if (align >= GET_MODE_ALIGNMENT (tmode))
    align = GET_MODE_ALIGNMENT (tmode);
  else
    {
      enum machine_mode tmode, xmode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
	   tmode != VOIDmode;
	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
	    || SLOW_UNALIGNED_ACCESS (tmode, align))
	  break;

      align = MAX (align, GET_MODE_ALIGNMENT (xmode));
    }

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = optab_handler (mov_optab, mode)->insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  gcc_assert (!l);
  return n_insns;
}
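
/* Worked example (hypothetical target with well-aligned 8-, 4-, 2- and
   1-byte integer modes): l == 13 is decomposed greedily from the widest
   usable mode down, 13 = 8 + 4 + 1, so move_by_pieces_ninsns returns 3.  */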
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  gcc_unreachable ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
		       unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
    case BLOCK_OP_TAILCALL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      gcc_unreachable ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
  gcc_assert (MEM_P (x));
  gcc_assert (MEM_P (y));
  gcc_assert (size);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align,
				       expected_align, expected_size))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size,
					  method == BLOCK_OP_TAILCALL);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  return emit_block_move_hints (x, y, size, method, 0, -1);
}
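
/* Usage sketch (illustrative): copy SIZE bytes between two BLKmode MEMs,
   allowing whatever mechanism is cheapest:

     emit_block_move (dst_mem, src_mem, GEN_INT (size), BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM must be used instead when the copy feeds an
   outgoing argument, so that a memcpy libcall cannot clobber argument
   registers or stack slots that are already loaded.  */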
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
#if defined (REG_PARM_STACK_SPACE)
  tree fn;
#endif

  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE)
  fn = emit_block_move_libcall_fn (false);
  if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
      && REG_PARM_STACK_SPACE (fn) != 0)
    return false;
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
			    unsigned int expected_align, HOST_WIDE_INT expected_size)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  if (expected_align < align)
    expected_align = align;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  if (insn_data[(int) code].n_operands == 4)
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  else
	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
					GEN_INT (expected_align
						 / BITS_PER_UNIT),
					GEN_INT (expected_size));
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
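
/* For reference, a target participates here by providing a movmemMM
   expander in its .md file; a minimal, hypothetical shape:

     (define_expand "movmemsi"
       [(use (match_operand:BLK 0 "memory_operand" ""))
	(use (match_operand:BLK 1 "memory_operand" ""))
	(use (match_operand:SI 2 "general_operand" ""))
	(use (match_operand:SI 3 "const_int_operand" ""))]
       ""
       "emit target-specific copy code, or FAIL;")

   The operands are destination, source, byte count and alignment,
   matching the operand[0..3] predicates checked above.  */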
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
{
  rtx dst_addr, src_addr;
  tree call_expr, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* Emit code to copy the addresses of DST and SRC and SIZE into new
     pseudos.  We can then place those new pseudos into a VAR_DECL and
     use them later.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
  CALL_EXPR_TAILCALL (call_expr) = tailcall;

  retval = expand_normal (call_expr);

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;
      DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    set_user_assembler_name (block_move_fn, asmspec);
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
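
/* The emitted RTL corresponds to this C shape (illustrative):

     iter = 0;
     goto cmp;
   top:
     ((char *) x)[iter] = ((char *) y)[iter];
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;

   i.e. a test-at-the-bottom loop that also handles size == 0.  */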
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      gcc_assert (tem);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  gcc_assert (GET_CODE (orig) == PARALLEL);

  length = XVECLEN (orig, 0);
  tmps = XALLOCAVEC (rtx, length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
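
/* For orientation, such a group might look like this (hypothetical
   16-byte struct returned in two DImode hard registers):

     (parallel [(expr_list (reg:DI 0) (const_int 0))
		(expr_list (reg:DI 1) (const_int 8))])

   Each element pairs a register with its byte offset into the block;
   gen_group_rtx keeps the mode/offset layout but substitutes fresh
   pseudos for the hard registers.  */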
/* A subroutine of emit_group_load.  Arguments as for emit_group_load,
   except that values are placed in TMPS[i], and must later be moved
   into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */

static void
emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
{
  rtx src;
  int start, i;
  enum machine_mode m = GET_MODE (orig_src);

  gcc_assert (GET_CODE (dst) == PARALLEL);

  if (m != VOIDmode
      && !SCALAR_INT_MODE_P (m)
      && !MEM_P (orig_src)
      && GET_CODE (orig_src) != CONCAT)
    {
      enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
      if (imode == BLKmode)
	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
      else
	src = gen_reg_rtx (imode);
      if (imode != BLKmode)
	src = gen_lowpart (GET_MODE (orig_src), src);
      emit_move_insn (src, orig_src);
      /* ...and back again.  */
      if (imode != BLKmode)
	src = gen_lowpart (imode, src);
      emit_group_load_1 (tmps, dst, src, type, ssize);
      return;
    }

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  gcc_assert (bytelen > 0);
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (COMPLEX_MODE_P (mode)
	       && GET_MODE (src) == mode
	       && bytelen == GET_MODE_SIZE (mode))
	/* Let emit_move_complex do the bulk of the work.  */
	tmps[i] = src;
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else
	    {
	      rtx mem;

	      gcc_assert (!bytepos);
	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
					   0, 1, NULL_RTX, mode, mode);
	    }
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src))
	{
	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;

	  if (len == ssize)
	    tmps[i] = src;
	  else
	    {
	      rtx first, second;

	      gcc_assert (2 * len == ssize);
	      split_double (src, &first, &second);
	      if (i)
		tmps[i] = second;
	      else
		tmps[i] = first;
	    }
	}
      else if (REG_P (src) && GET_MODE (src) == mode)
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_cst (NULL_TREE, shift), tmps[i], 0);
    }
}
/* Emit code to move a block SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx src, tree type, int ssize)
{
  rtx *tmps;
  int i;

  tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
  emit_group_load_1 (tmps, dst, src, type, ssize);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = 0; i < XVECLEN (dst, 0); i++)
    {
      rtx d = XEXP (XVECEXP (dst, 0, i), 0);
      if (d == NULL)
	continue;
      emit_move_insn (d, tmps[i]);
    }
}
/* Similar, but load SRC into new pseudos in a format that looks like
   PARALLEL.  This can later be fed to emit_group_move to get things
   in the right place.  */

rtx
emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
{
  rtvec vec;
  int i;

  vec = rtvec_alloc (XVECLEN (parallel, 0));
  emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);

  /* Convert the vector to look just like the original PARALLEL, except
     with the computed values.  */
  for (i = 0; i < XVECLEN (parallel, 0); i++)
    {
      rtx e = XVECEXP (parallel, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	{
	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
	}
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  gcc_assert (GET_CODE (src) == PARALLEL
	      && GET_CODE (dst) == PARALLEL
	      && XVECLEN (src, 0) == XVECLEN (dst, 0));

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Move a group of registers represented by a PARALLEL into pseudos.  */

rtx
emit_group_move_into_temps (rtx src)
{
  rtvec vec = rtvec_alloc (XVECLEN (src, 0));
  int i;

  for (i = 0; i < XVECLEN (src, 0); i++)
    {
      rtx e = XVECEXP (src, 0, i);
      rtx d = XEXP (e, 0);

      if (d)
	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
      RTVEC_ELT (vec, i) = e;
    }

  return gen_rtx_PARALLEL (GET_MODE (src), vec);
}
1906 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1907 where SRC is non-consecutive registers represented by a PARALLEL.
1908 SSIZE represents the total size of block ORIG_DST, or -1 if not
1912 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1915 int start, finish, i;
1916 enum machine_mode m = GET_MODE (orig_dst);
1918 gcc_assert (GET_CODE (src) == PARALLEL);
1920 if (!SCALAR_INT_MODE_P (m)
1921 && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1923 enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1924 if (imode == BLKmode)
1925 dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1927 dst = gen_reg_rtx (imode);
1928 emit_group_store (dst, src, type, ssize);
1929 if (imode != BLKmode)
1930 dst = gen_lowpart (GET_MODE (orig_dst), dst);
1931 emit_move_insn (orig_dst, dst);
1935 /* Check for a NULL entry, used to indicate that the parameter goes
1936 both on the stack and in registers. */
1937 if (XEXP (XVECEXP (src, 0, 0), 0))
1941 finish = XVECLEN (src, 0);
1943 tmps = XALLOCAVEC (rtx, finish);
1945 /* Copy the (probable) hard regs into pseudos. */
1946 for (i = start; i < finish; i++)
1948 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1949 if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1951 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1952 emit_move_insn (tmps[i], reg);
1958 /* If we won't be storing directly into memory, protect the real destination
1959 from strange tricks we might play. */
1961 if (GET_CODE (dst) == PARALLEL)
1965 /* We can get a PARALLEL dst if there is a conditional expression in
1966 a return statement. In that case, the dst and src are the same,
1967 so no action is necessary. */
1968 if (rtx_equal_p (dst, src))
1971 /* It is unclear if we can ever reach here, but we may as well handle
1972 it. Allocate a temporary, and split this into a store/load to/from
1975 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1976 emit_group_store (temp, src, type, ssize);
1977 emit_group_load (dst, temp, type, ssize);
1980 else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1982 enum machine_mode outer = GET_MODE (dst);
1983 enum machine_mode inner;
1984 HOST_WIDE_INT bytepos;
1988 if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1989 dst = gen_reg_rtx (outer);
1991 /* Make life a bit easier for combine. */
1992 /* If the first element of the vector is the low part
1993 of the destination mode, use a paradoxical subreg to
1994 initialize the destination. */
1997 inner = GET_MODE (tmps[start]);
1998 bytepos = subreg_lowpart_offset (inner, outer);
1999 if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2001 temp = simplify_gen_subreg (outer, tmps[start],
2005 emit_move_insn (dst, temp);
2012 /* If the first element wasn't the low part, try the last. */
2014 && start < finish - 1)
2016 inner = GET_MODE (tmps[finish - 1]);
2017 bytepos = subreg_lowpart_offset (inner, outer);
2018 if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2020 temp = simplify_gen_subreg (outer, tmps[finish - 1],
2024 emit_move_insn (dst, temp);
2031 /* Otherwise, simply initialize the result to zero. */
2033 emit_move_insn (dst, CONST0_RTX (outer));
2036 /* Process the pieces. */
2037 for (i = start; i < finish; i++)
2039 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2040 enum machine_mode mode = GET_MODE (tmps[i]);
2041 unsigned int bytelen = GET_MODE_SIZE (mode);
2044 /* Handle trailing fragments that run over the size of the struct. */
2045 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2047 /* store_bit_field always takes its value from the lsb.
2048 Move the fragment to the lsb if it's not already there. */
2050 #ifdef BLOCK_REG_PADDING
2051 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2052 == (BYTES_BIG_ENDIAN ? upward : downward)
2058 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2059 tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2060 build_int_cst (NULL_TREE, shift),
2063 bytelen = ssize - bytepos;
2066 if (GET_CODE (dst) == CONCAT)
2068 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2069 dest = XEXP (dst, 0);
2070 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2072 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2073 dest = XEXP (dst, 1);
2077 enum machine_mode dest_mode = GET_MODE (dest);
2078 enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2079 int dest_size = GET_MODE_SIZE (dest_mode);
2080 int tmp_size = GET_MODE_SIZE (tmp_mode);
2082 gcc_assert (bytepos == 0
2084 && dest_size == tmp_size);
2086 if (GET_MODE_ALIGNMENT (dest_mode)
2087 >= GET_MODE_ALIGNMENT (tmp_mode))
2089 dest = assign_stack_temp (dest_mode, dest_size, 0);
2090 emit_move_insn (adjust_address (dest,
2098 dest = assign_stack_temp (tmp_mode, tmp_size, 0);
2099 emit_move_insn (dest, tmps[i]);
2100 dst = adjust_address (dest, dest_mode, bytepos);
2106 /* Optimize the access just a bit. */
2108 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2109 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2110 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2111 && bytelen == GET_MODE_SIZE (mode))
2112 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2114 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2118 /* Copy from the pseudo into the (probable) hard reg. */
2119 if (orig_dst != dst)
2120 emit_move_insn (orig_dst, dst);
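/* Illustrative note, not part of the original source: the SRC PARALLEL
   handled above pairs each register with its byte offset in the
   aggregate, e.g. for a 16-byte struct returned in two DImode registers
   one might see

     (parallel [(expr_list (reg:DI 0) (const_int 0))
                (expr_list (reg:DI 1) (const_int 8))])

   so the loop stores tmps[0] at byte offset 0 and tmps[1] at byte
   offset 8 of DST.  The register numbers are hypothetical.  */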
2123 /* Generate code to copy a BLKmode object of TYPE out of a
2124 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2125 is null, a stack temporary is created. TGTBLK is returned.
2127 The purpose of this routine is to handle functions that return
2128 BLKmode structures in registers. Some machines (the PA for example)
2129 want to return all small structures in registers regardless of the
2130 structure's alignment. */
2133 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2139 enum machine_mode copy_mode;
2143 tgtblk = assign_temp (build_qualified_type (type,
2145 | TYPE_QUAL_CONST)),
2147 preserve_temp_slots (tgtblk);
2150 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2151 into a new pseudo which is a full word. */
2153 if (GET_MODE (srcreg) != BLKmode
2154 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2155 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2157 /* If the structure doesn't take up a whole number of words, see whether
2158 SRCREG is padded on the left or on the right. If it's on the left,
2159 set PADDING_CORRECTION to the number of bits to skip.
2161 In most ABIs, the structure will be returned at the least significant end of
2162 the register, which translates to right padding on little-endian
2163 targets and left padding on big-endian targets. The opposite
2164 holds if the structure is returned at the most significant
2165 end of the register. */
2166 if (bytes % UNITS_PER_WORD != 0
2167 && (targetm.calls.return_in_msb (type)
2169 : BYTES_BIG_ENDIAN))
2171 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2173 /* Copy the structure BITSIZE bits at a time. If the target lives in
2174 memory, take care of not reading/writing past its end by selecting
2175 a copy mode suited to BITSIZE. This should always be possible given
2176 how it is computed.
2178 We could probably emit more efficient code for machines which do not use
2179 strict alignment, but it doesn't seem worth the effort at the current
2180 time. */
2182 copy_mode = word_mode;
2185 enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2186 if (mem_mode != BLKmode)
2187 copy_mode = mem_mode;
2190 for (bitpos = 0, xbitpos = padding_correction;
2191 bitpos < bytes * BITS_PER_UNIT;
2192 bitpos += bitsize, xbitpos += bitsize)
2194 /* We need a new source operand each time xbitpos is on a
2195 word boundary and when xbitpos == padding_correction
2196 (the first time through). */
2197 if (xbitpos % BITS_PER_WORD == 0
2198 || xbitpos == padding_correction)
2199 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2202 /* We need a new destination operand each time bitpos is on
2203 a word boundary. */
2204 if (bitpos % BITS_PER_WORD == 0)
2205 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2207 /* Use xbitpos for the source extraction (right justified) and
2208 bitpos for the destination store (left justified). */
2209 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2210 extract_bit_field (src, bitsize,
2211 xbitpos % BITS_PER_WORD, 1,
2212 NULL_RTX, copy_mode, copy_mode));
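/* Worked example, illustrative only: for a 6-byte structure on a
   32-bit big-endian target that returns aggregates at the least
   significant end of the register, bytes % UNITS_PER_WORD == 2, so
   padding_correction = 32 - 2 * 8 = 16 bits.  The loop above then
   reads source bits starting at xbitpos == 16 while writing
   destination bits starting at bitpos == 0, skipping the padding.  */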
2218 /* Add a USE expression for REG to the (possibly empty) list pointed
2219 to by CALL_FUSAGE. REG must denote a hard register. */
2222 use_reg (rtx *call_fusage, rtx reg)
2224 gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2227 = gen_rtx_EXPR_LIST (VOIDmode,
2228 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2231 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2232 starting at REGNO. All of these registers must be hard registers. */
2235 use_regs (rtx *call_fusage, int regno, int nregs)
2239 gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2241 for (i = 0; i < nregs; i++)
2242 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2245 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2246 PARALLEL REGS. This is for calls that pass values in multiple
2247 non-contiguous locations. The Irix 6 ABI has examples of this. */
2250 use_group_regs (rtx *call_fusage, rtx regs)
2254 for (i = 0; i < XVECLEN (regs, 0); i++)
2256 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2258 /* A NULL entry means the parameter goes both on the stack and in
2259 registers. This can also be a MEM for targets that pass values
2260 partially on the stack and partially in registers. */
2261 if (reg != 0 && REG_P (reg))
2262 use_reg (call_fusage, reg);
2267 /* Determine whether the LEN bytes generated by CONSTFUN can be
2268 stored to memory using several move instructions. CONSTFUNDATA is
2269 a pointer which will be passed as argument in every CONSTFUN call.
2270 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2271 a memset operation and false if it's a copy of a constant string.
2272 Return nonzero if a call to store_by_pieces should succeed. */
2275 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2276 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2277 void *constfundata, unsigned int align, bool memsetp)
2279 unsigned HOST_WIDE_INT l;
2280 unsigned int max_size;
2281 HOST_WIDE_INT offset = 0;
2282 enum machine_mode mode, tmode;
2283 enum insn_code icode;
2291 ? SET_BY_PIECES_P (len, align)
2292 : STORE_BY_PIECES_P (len, align)))
2295 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2296 if (align >= GET_MODE_ALIGNMENT (tmode))
2297 align = GET_MODE_ALIGNMENT (tmode);
2300 enum machine_mode xmode;
2302 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2304 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2305 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2306 || SLOW_UNALIGNED_ACCESS (tmode, align))
2309 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2312 /* We would first store what we can in the largest integer mode, then go to
2313 successively smaller modes. */
2316 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2321 max_size = STORE_MAX_PIECES + 1;
2322 while (max_size > 1)
2324 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2325 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2326 if (GET_MODE_SIZE (tmode) < max_size)
2329 if (mode == VOIDmode)
2332 icode = optab_handler (mov_optab, mode)->insn_code;
2333 if (icode != CODE_FOR_nothing
2334 && align >= GET_MODE_ALIGNMENT (mode))
2336 unsigned int size = GET_MODE_SIZE (mode);
2343 cst = (*constfun) (constfundata, offset, mode);
2344 if (!LEGITIMATE_CONSTANT_P (cst))
2354 max_size = GET_MODE_SIZE (mode);
2357 /* The code above should have handled everything. */
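#if 0
/* Illustrative sketch, not part of the original file: a CONSTFUN
   suitable for the routine above, materializing bytes of a constant
   string.  The function name is made up, and it assumes a helper like
   the c_readstr used by the string builtins is visible here.  */
static rtx
example_read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
  const char *str = (const char *) data;

  /* Return an rtx holding GET_MODE_SIZE (MODE) bytes of STR,
     starting at byte OFFSET.  */
  return c_readstr (str + offset, mode);
}

/* A caller would test
     can_store_by_pieces (len, example_read_str, (void *) str, align, false)
   before committing to a store_by_pieces call with the same
   arguments.  */
#endif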
2364 /* Generate several move instructions to store LEN bytes generated by
2365 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2366 pointer which will be passed as argument in every CONSTFUN call.
2367 ALIGN is maximum alignment we can assume. MEMSETP is true if this is
2368 a memset operation and false if it's a copy of a constant string.
2369 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2370 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2371 stpcpy. */
2374 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2375 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2376 void *constfundata, unsigned int align, bool memsetp, int endp)
2378 struct store_by_pieces data;
2382 gcc_assert (endp != 2);
2387 ? SET_BY_PIECES_P (len, align)
2388 : STORE_BY_PIECES_P (len, align));
2389 data.constfun = constfun;
2390 data.constfundata = constfundata;
2393 store_by_pieces_1 (&data, align);
2398 gcc_assert (!data.reverse);
2403 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2404 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2406 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2409 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2416 to1 = adjust_address (data.to, QImode, data.offset);
2424 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2425 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2428 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2430 struct store_by_pieces data;
2435 data.constfun = clear_by_pieces_1;
2436 data.constfundata = NULL;
2439 store_by_pieces_1 (&data, align);
2442 /* Callback routine for clear_by_pieces.
2443 Return const0_rtx unconditionally. */
2446 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2447 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2448 enum machine_mode mode ATTRIBUTE_UNUSED)
2453 /* Subroutine of clear_by_pieces and store_by_pieces.
2454 Generate several move instructions to store LEN bytes of block TO. (A MEM
2455 rtx with BLKmode). ALIGN is maximum alignment we can assume. */
2458 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2459 unsigned int align ATTRIBUTE_UNUSED)
2461 rtx to_addr = XEXP (data->to, 0);
2462 unsigned int max_size = STORE_MAX_PIECES + 1;
2463 enum machine_mode mode = VOIDmode, tmode;
2464 enum insn_code icode;
2467 data->to_addr = to_addr;
2469 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2470 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2472 data->explicit_inc_to = 0;
2474 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2476 data->offset = data->len;
2478 /* If storing requires more than two move insns,
2479 copy addresses to registers (to make displacements shorter)
2480 and use post-increment if available. */
2481 if (!data->autinc_to
2482 && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2484 /* Determine the main mode we'll be using. */
2485 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2486 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2487 if (GET_MODE_SIZE (tmode) < max_size)
2490 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2492 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2493 data->autinc_to = 1;
2494 data->explicit_inc_to = -1;
2497 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2498 && ! data->autinc_to)
2500 data->to_addr = copy_addr_to_reg (to_addr);
2501 data->autinc_to = 1;
2502 data->explicit_inc_to = 1;
2505 if ( !data->autinc_to && CONSTANT_P (to_addr))
2506 data->to_addr = copy_addr_to_reg (to_addr);
2509 tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2510 if (align >= GET_MODE_ALIGNMENT (tmode))
2511 align = GET_MODE_ALIGNMENT (tmode);
2514 enum machine_mode xmode;
2516 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2518 xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2519 if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2520 || SLOW_UNALIGNED_ACCESS (tmode, align))
2523 align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2526 /* First store what we can in the largest integer mode, then go to
2527 successively smaller modes. */
2529 while (max_size > 1)
2531 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2532 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2533 if (GET_MODE_SIZE (tmode) < max_size)
2536 if (mode == VOIDmode)
2539 icode = optab_handler (mov_optab, mode)->insn_code;
2540 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2541 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2543 max_size = GET_MODE_SIZE (mode);
2546 /* The code above should have handled everything. */
2547 gcc_assert (!data->len);
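/* Worked example, illustrative only: with STORE_MAX_PIECES == 8 and an
   aligned 11-byte store, the loop above emits one DImode (8-byte)
   store; SImode then covers nothing since only 3 bytes remain, so a
   HImode store handles 2 bytes and a final QImode store the last one,
   leaving data->len == 0 as asserted.  */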
2550 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2551 with move instructions for mode MODE. GENFUN is the gen_... function
2552 to make a move insn for that mode. DATA has all the other info. */
2555 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2556 struct store_by_pieces *data)
2558 unsigned int size = GET_MODE_SIZE (mode);
2561 while (data->len >= size)
2564 data->offset -= size;
2566 if (data->autinc_to)
2567 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2570 to1 = adjust_address (data->to, mode, data->offset);
2572 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2573 emit_insn (gen_add2_insn (data->to_addr,
2574 GEN_INT (-(HOST_WIDE_INT) size)));
2576 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2577 emit_insn ((*genfun) (to1, cst));
2579 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2580 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2582 if (! data->reverse)
2583 data->offset += size;
2589 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2590 its length in bytes. */
2593 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2594 unsigned int expected_align, HOST_WIDE_INT expected_size)
2596 enum machine_mode mode = GET_MODE (object);
2599 gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2601 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2602 just move a zero. Otherwise, do this a piece at a time. */
2604 && GET_CODE (size) == CONST_INT
2605 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2607 rtx zero = CONST0_RTX (mode);
2610 emit_move_insn (object, zero);
2614 if (COMPLEX_MODE_P (mode))
2616 zero = CONST0_RTX (GET_MODE_INNER (mode));
2619 write_complex_part (object, zero, 0);
2620 write_complex_part (object, zero, 1);
2626 if (size == const0_rtx)
2629 align = MEM_ALIGN (object);
2631 if (GET_CODE (size) == CONST_INT
2632 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2633 clear_by_pieces (object, INTVAL (size), align);
2634 else if (set_storage_via_setmem (object, size, const0_rtx, align,
2635 expected_align, expected_size))
2638 return set_storage_via_libcall (object, size, const0_rtx,
2639 method == BLOCK_OP_TAILCALL);
2645 clear_storage (rtx object, rtx size, enum block_op_methods method)
2647 return clear_storage_hints (object, size, method, 0, -1);
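#if 0
/* Usage sketch, illustrative only: zeroing a 64-byte BLKmode object.
   MEM here stands for some valid BLKmode MEM rtx; the name is
   hypothetical.  Depending on CLEAR_BY_PIECES_P and the setmem
   pattern, this becomes inline stores, a setmem insn, or a call to
   memset.  */
clear_storage (mem, GEN_INT (64), BLOCK_OP_NORMAL);
#endif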
2651 /* A subroutine of clear_storage. Expand a call to memset.
2652 Return the return value of memset, 0 otherwise. */
2655 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2657 tree call_expr, fn, object_tree, size_tree, val_tree;
2658 enum machine_mode size_mode;
2661 /* Emit code to copy OBJECT and SIZE into new pseudos. We can then
2662 place those new pseudos into a VAR_DECL and use them later. */
2664 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2666 size_mode = TYPE_MODE (sizetype);
2667 size = convert_to_mode (size_mode, size, 1);
2668 size = copy_to_mode_reg (size_mode, size);
2670 /* It is incorrect to use the libcall calling conventions to call
2671 memset in this context. This could be a user call to memset and
2672 the user may wish to examine the return value from memset. For
2673 targets where libcalls and normal calls have different conventions
2674 for returning pointers, we could end up generating incorrect code. */
2676 object_tree = make_tree (ptr_type_node, object);
2677 if (GET_CODE (val) != CONST_INT)
2678 val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2679 size_tree = make_tree (sizetype, size);
2680 val_tree = make_tree (integer_type_node, val);
2682 fn = clear_storage_libcall_fn (true);
2683 call_expr = build_call_expr (fn, 3,
2684 object_tree, integer_zero_node, size_tree);
2685 CALL_EXPR_TAILCALL (call_expr) = tailcall;
2687 retval = expand_normal (call_expr);
2692 /* A subroutine of set_storage_via_libcall. Create the tree node
2693 for the function we use for block clears. The first time FOR_CALL
2694 is true, we call assemble_external. */
2696 static GTY(()) tree block_clear_fn;
2699 init_block_clear_fn (const char *asmspec)
2701 if (!block_clear_fn)
2705 fn = get_identifier ("memset");
2706 args = build_function_type_list (ptr_type_node, ptr_type_node,
2707 integer_type_node, sizetype,
2710 fn = build_decl (FUNCTION_DECL, fn, args);
2711 DECL_EXTERNAL (fn) = 1;
2712 TREE_PUBLIC (fn) = 1;
2713 DECL_ARTIFICIAL (fn) = 1;
2714 TREE_NOTHROW (fn) = 1;
2715 DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2716 DECL_VISIBILITY_SPECIFIED (fn) = 1;
2718 block_clear_fn = fn;
2722 set_user_assembler_name (block_clear_fn, asmspec);
2726 clear_storage_libcall_fn (int for_call)
2728 static bool emitted_extern;
2730 if (!block_clear_fn)
2731 init_block_clear_fn (NULL);
2733 if (for_call && !emitted_extern)
2735 emitted_extern = true;
2736 make_decl_rtl (block_clear_fn);
2737 assemble_external (block_clear_fn);
2740 return block_clear_fn;
2743 /* Expand a setmem pattern; return true if successful. */
2746 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2747 unsigned int expected_align, HOST_WIDE_INT expected_size)
2749 /* Try the most limited insn first, because there's no point
2750 including more than one in the machine description unless
2751 the more limited one has some advantage. */
2753 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2754 enum machine_mode mode;
2756 if (expected_align < align)
2757 expected_align = align;
2759 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2760 mode = GET_MODE_WIDER_MODE (mode))
2762 enum insn_code code = setmem_optab[(int) mode];
2763 insn_operand_predicate_fn pred;
2765 if (code != CODE_FOR_nothing
2766 /* We don't need MODE to be narrower than
2767 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2768 the mode mask, as it is returned by the macro, it will
2769 definitely be less than the actual mode mask. */
2770 && ((GET_CODE (size) == CONST_INT
2771 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2772 <= (GET_MODE_MASK (mode) >> 1)))
2773 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2774 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2775 || (*pred) (object, BLKmode))
2776 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2777 || (*pred) (opalign, VOIDmode)))
2780 enum machine_mode char_mode;
2781 rtx last = get_last_insn ();
2784 opsize = convert_to_mode (mode, size, 1);
2785 pred = insn_data[(int) code].operand[1].predicate;
2786 if (pred != 0 && ! (*pred) (opsize, mode))
2787 opsize = copy_to_mode_reg (mode, opsize);
2790 char_mode = insn_data[(int) code].operand[2].mode;
2791 if (char_mode != VOIDmode)
2793 opchar = convert_to_mode (char_mode, opchar, 1);
2794 pred = insn_data[(int) code].operand[2].predicate;
2795 if (pred != 0 && ! (*pred) (opchar, char_mode))
2796 opchar = copy_to_mode_reg (char_mode, opchar);
2799 if (insn_data[(int) code].n_operands == 4)
2800 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2802 pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2803 GEN_INT (expected_align
2805 GEN_INT (expected_size));
2812 delete_insns_since (last);
2820 /* Write to one of the components of the complex value CPLX. Write VAL to
2821 the real part if IMAG_P is false, and the imaginary part if it's true. */
2824 write_complex_part (rtx cplx, rtx val, bool imag_p)
2826 enum machine_mode cmode;
2827 enum machine_mode imode;
2830 if (GET_CODE (cplx) == CONCAT)
2832 emit_move_insn (XEXP (cplx, imag_p), val);
2836 cmode = GET_MODE (cplx);
2837 imode = GET_MODE_INNER (cmode);
2838 ibitsize = GET_MODE_BITSIZE (imode);
2840 /* For MEMs simplify_gen_subreg may generate an invalid new address
2841 because, e.g., the original address is considered mode-dependent
2842 by the target, which restricts simplify_subreg from invoking
2843 adjust_address_nv. Instead of preparing fallback support for an
2844 invalid address, we call adjust_address_nv directly. */
2847 emit_move_insn (adjust_address_nv (cplx, imode,
2848 imag_p ? GET_MODE_SIZE (imode) : 0),
2853 /* If the sub-object is at least word sized, then we know that subregging
2854 will work. This special case is important, since store_bit_field
2855 wants to operate on integer modes, and there's rarely an OImode to
2856 correspond to TCmode. */
2857 if (ibitsize >= BITS_PER_WORD
2858 /* For hard regs we have exact predicates. Assume we can split
2859 the original object if it spans an even number of hard regs.
2860 This special case is important for SCmode on 64-bit platforms
2861 where the natural size of floating-point regs is 32-bit. */
2863 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2864 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2866 rtx part = simplify_gen_subreg (imode, cplx, cmode,
2867 imag_p ? GET_MODE_SIZE (imode) : 0);
2870 emit_move_insn (part, val);
2874 /* simplify_gen_subreg may fail for sub-word MEMs. */
2875 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2878 store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
2881 /* Extract one of the components of the complex value CPLX. Extract the
2882 real part if IMAG_P is false, and the imaginary part if it's true. */
2885 read_complex_part (rtx cplx, bool imag_p)
2887 enum machine_mode cmode, imode;
2890 if (GET_CODE (cplx) == CONCAT)
2891 return XEXP (cplx, imag_p);
2893 cmode = GET_MODE (cplx);
2894 imode = GET_MODE_INNER (cmode);
2895 ibitsize = GET_MODE_BITSIZE (imode);
2897 /* Special case reads from complex constants that got spilled to memory. */
2898 if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2900 tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2901 if (decl && TREE_CODE (decl) == COMPLEX_CST)
2903 tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2904 if (CONSTANT_CLASS_P (part))
2905 return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2909 /* For MEMs simplify_gen_subreg may generate an invalid new address
2910 because, e.g., the original address is considered mode-dependent
2911 by the target, which restricts simplify_subreg from invoking
2912 adjust_address_nv. Instead of preparing fallback support for an
2913 invalid address, we call adjust_address_nv directly. */
2915 return adjust_address_nv (cplx, imode,
2916 imag_p ? GET_MODE_SIZE (imode) : 0);
2918 /* If the sub-object is at least word sized, then we know that subregging
2919 will work. This special case is important, since extract_bit_field
2920 wants to operate on integer modes, and there's rarely an OImode to
2921 correspond to TCmode. */
2922 if (ibitsize >= BITS_PER_WORD
2923 /* For hard regs we have exact predicates. Assume we can split
2924 the original object if it spans an even number of hard regs.
2925 This special case is important for SCmode on 64-bit platforms
2926 where the natural size of floating-point regs is 32-bit. */
2928 && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2929 && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2931 rtx ret = simplify_gen_subreg (imode, cplx, cmode,
2932 imag_p ? GET_MODE_SIZE (imode) : 0);
2936 /* simplify_gen_subreg may fail for sub-word MEMs. */
2937 gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2940 return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
2941 true, NULL_RTX, imode, imode);
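/* Layout note with an example, illustrative only: for an SCmode value
   in memory the real part lives at byte offset 0 and the imaginary
   part at byte offset GET_MODE_SIZE (SFmode) == 4, which is why both
   routines above pass imag_p ? GET_MODE_SIZE (imode) : 0 when forming
   the subreg or adjusted address.  */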
2944 /* A subroutine of emit_move_insn_1. Yet another lowpart generator.
2945 NEW_MODE and OLD_MODE are the same size. Return NULL if X cannot be
2946 represented in NEW_MODE. If FORCE is true, this will never happen, as
2947 we'll force-create a SUBREG if needed. */
2950 emit_move_change_mode (enum machine_mode new_mode,
2951 enum machine_mode old_mode, rtx x, bool force)
2955 if (push_operand (x, GET_MODE (x)))
2957 ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
2958 MEM_COPY_ATTRIBUTES (ret, x);
2962 /* We don't have to worry about changing the address since the
2963 size in bytes is supposed to be the same. */
2964 if (reload_in_progress)
2966 /* Copy the MEM to change the mode and move any
2967 substitutions from the old MEM to the new one. */
2968 ret = adjust_address_nv (x, new_mode, 0);
2969 copy_replacements (x, ret);
2972 ret = adjust_address (x, new_mode, 0);
2976 /* Note that we do want simplify_subreg's behavior of validating
2977 that the new mode is ok for a hard register. If we were to use
2978 simplify_gen_subreg, we would create the subreg, but would
2979 probably run into the target not being able to implement it. */
2980 /* Except, of course, when FORCE is true, when this is exactly what
2981 we want. Which is needed for CCmodes on some targets. */
2983 ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
2985 ret = simplify_subreg (new_mode, x, old_mode, 0);
2991 /* A subroutine of emit_move_insn_1. Generate a move from Y into X using
2992 an integer mode of the same size as MODE. Returns the instruction
2993 emitted, or NULL if such a move could not be generated. */
2996 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
2998 enum machine_mode imode;
2999 enum insn_code code;
3001 /* There must exist a mode of the exact size we require. */
3002 imode = int_mode_for_mode (mode);
3003 if (imode == BLKmode)
3006 /* The target must support moves in this mode. */
3007 code = optab_handler (mov_optab, imode)->insn_code;
3008 if (code == CODE_FOR_nothing)
3011 x = emit_move_change_mode (imode, mode, x, force);
3014 y = emit_move_change_mode (imode, mode, y, force);
3017 return emit_insn (GEN_FCN (code) (x, y));
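/* Example, illustrative only: an SFmode move on a target that lacks a
   movsf pattern but has movsi is handled here by int_mode_for_mode
   returning SImode; both operands are re-expressed in SImode and a
   single movsi is emitted.  */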
3020 /* A subroutine of emit_move_insn_1. X is a push_operand in MODE.
3021 Return an equivalent MEM that does not use an auto-increment. */
3024 emit_move_resolve_push (enum machine_mode mode, rtx x)
3026 enum rtx_code code = GET_CODE (XEXP (x, 0));
3027 HOST_WIDE_INT adjust;
3030 adjust = GET_MODE_SIZE (mode);
3031 #ifdef PUSH_ROUNDING
3032 adjust = PUSH_ROUNDING (adjust);
3034 if (code == PRE_DEC || code == POST_DEC)
3036 else if (code == PRE_MODIFY || code == POST_MODIFY)
3038 rtx expr = XEXP (XEXP (x, 0), 1);
3041 gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3042 gcc_assert (GET_CODE (XEXP (expr, 1)) == CONST_INT);
3043 val = INTVAL (XEXP (expr, 1));
3044 if (GET_CODE (expr) == MINUS)
3046 gcc_assert (adjust == val || adjust == -val);
3050 /* Do not use anti_adjust_stack, since we don't want to update
3051 stack_pointer_delta. */
3052 temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3053 GEN_INT (adjust), stack_pointer_rtx,
3054 0, OPTAB_LIB_WIDEN);
3055 if (temp != stack_pointer_rtx)
3056 emit_move_insn (stack_pointer_rtx, temp);
3063 temp = stack_pointer_rtx;
3068 temp = plus_constant (stack_pointer_rtx, -adjust);
3074 return replace_equiv_address (x, temp);
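/* Worked example, illustrative only: pushing a DImode value through
   (mem (pre_dec (reg sp))) on a downward-growing stack with no extra
   PUSH_ROUNDING gives adjust == 8, negated for PRE_DEC, so the code
   above emits sp = sp - 8 and returns a MEM addressing the new stack
   top directly, with no auto-increment left in the address.  */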
3077 /* A subroutine of emit_move_complex. Generate a move from Y into X.
3078 X is known to satisfy push_operand, and MODE is known to be complex.
3079 Returns the last instruction emitted. */
3082 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3084 enum machine_mode submode = GET_MODE_INNER (mode);
3087 #ifdef PUSH_ROUNDING
3088 unsigned int submodesize = GET_MODE_SIZE (submode);
3090 /* In case we output to the stack, but the size is smaller than the
3091 machine can push exactly, we need to use move instructions. */
3092 if (PUSH_ROUNDING (submodesize) != submodesize)
3094 x = emit_move_resolve_push (mode, x);
3095 return emit_move_insn (x, y);
3099 /* Note that the real part always precedes the imag part in memory
3100 regardless of machine's endianness. */
3101 switch (GET_CODE (XEXP (x, 0)))
3115 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3116 read_complex_part (y, imag_first));
3117 return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3118 read_complex_part (y, !imag_first));
3121 /* A subroutine of emit_move_complex. Perform the move from Y to X
3122 via two moves of the parts. Returns the last instruction emitted. */
3125 emit_move_complex_parts (rtx x, rtx y)
3127 /* Show the output dies here. This is necessary for SUBREGs
3128 of pseudos since we cannot track their lifetimes correctly;
3129 hard regs shouldn't appear here except as return values. */
3130 if (!reload_completed && !reload_in_progress
3131 && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3134 write_complex_part (x, read_complex_part (y, false), false);
3135 write_complex_part (x, read_complex_part (y, true), true);
3137 return get_last_insn ();
3140 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3141 MODE is known to be complex. Returns the last instruction emitted. */
3144 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3148 /* Need to take special care for pushes, to maintain proper ordering
3149 of the data, and possibly extra padding. */
3150 if (push_operand (x, mode))
3151 return emit_move_complex_push (mode, x, y);
3153 /* See if we can coerce the target into moving both values at once. */
3155 /* Move floating point as parts. */
3156 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3157 && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3159 /* Not possible if the values are inherently not adjacent. */
3160 else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3162 /* Is possible if both are registers (or subregs of registers). */
3163 else if (register_operand (x, mode) && register_operand (y, mode))
3165 /* If one of the operands is a memory, and alignment constraints
3166 are friendly enough, we may be able to do combined memory operations.
3167 We do not attempt this if Y is a constant because that combination is
3168 usually better with the by-parts thing below. */
3169 else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3170 && (!STRICT_ALIGNMENT
3171 || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3180 /* For memory to memory moves, optimal behavior can be had with the
3181 existing block move logic. */
3182 if (MEM_P (x) && MEM_P (y))
3184 emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3185 BLOCK_OP_NO_LIBCALL);
3186 return get_last_insn ();
3189 ret = emit_move_via_integer (mode, x, y, true);
3194 return emit_move_complex_parts (x, y);
3197 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3198 MODE is known to be MODE_CC. Returns the last instruction emitted. */
3201 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3205 /* Assume all MODE_CC modes are equivalent; if we have movcc, use it. */
3208 enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3209 if (code != CODE_FOR_nothing)
3211 x = emit_move_change_mode (CCmode, mode, x, true);
3212 y = emit_move_change_mode (CCmode, mode, y, true);
3213 return emit_insn (GEN_FCN (code) (x, y));
3217 /* Otherwise, find the MODE_INT mode of the same width. */
3218 ret = emit_move_via_integer (mode, x, y, false);
3219 gcc_assert (ret != NULL);
3223 /* Return true if word I of OP lies entirely in the
3224 undefined bits of a paradoxical subreg. */
3227 undefined_operand_subword_p (const_rtx op, int i)
3229 enum machine_mode innermode, innermostmode;
3231 if (GET_CODE (op) != SUBREG)
3233 innermode = GET_MODE (op);
3234 innermostmode = GET_MODE (SUBREG_REG (op));
3235 offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3236 /* The SUBREG_BYTE represents offset, as if the value were stored in
3237 memory, except for a paradoxical subreg where we define
3238 SUBREG_BYTE to be 0; undo this exception as in
3239 simplify_subreg. */
3240 if (SUBREG_BYTE (op) == 0
3241 && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3243 int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3244 if (WORDS_BIG_ENDIAN)
3245 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3246 if (BYTES_BIG_ENDIAN)
3247 offset += difference % UNITS_PER_WORD;
3249 if (offset >= GET_MODE_SIZE (innermostmode)
3250 || offset <= -GET_MODE_SIZE (word_mode))
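/* Example, illustrative only: on a little-endian 32-bit target,
   (subreg:DI (reg:SI) 0) is paradoxical; word 0 overlaps the SImode
   register but word 1 lies entirely in the undefined upper bits, so
   this predicate returns true for I == 1 and emit_move_multi_word can
   skip emitting a move for that word.  */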
3255 /* A subroutine of emit_move_insn_1. Generate a move from Y into X.
3256 MODE is any multi-word or full-word mode that lacks a move_insn
3257 pattern. Note that you will get better code if you define such
3258 patterns, even if they must turn into multiple assembler instructions. */
3261 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3268 gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3270 /* If X is a push on the stack, do the push now and replace
3271 X with a reference to the stack pointer. */
3272 if (push_operand (x, mode))
3273 x = emit_move_resolve_push (mode, x);
3275 /* If we are in reload, see if either operand is a MEM whose address
3276 is scheduled for replacement. */
3277 if (reload_in_progress && MEM_P (x)
3278 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3279 x = replace_equiv_address_nv (x, inner);
3280 if (reload_in_progress && MEM_P (y)
3281 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3282 y = replace_equiv_address_nv (y, inner);
3286 need_clobber = false;
3288 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3291 rtx xpart = operand_subword (x, i, 1, mode);
3294 /* Do not generate code for a move if it would come entirely
3295 from the undefined bits of a paradoxical subreg. */
3296 if (undefined_operand_subword_p (y, i))
3299 ypart = operand_subword (y, i, 1, mode);
3301 /* If we can't get a part of Y, put Y into memory if it is a
3302 constant. Otherwise, force it into a register. Then we must
3303 be able to get a part of Y. */
3304 if (ypart == 0 && CONSTANT_P (y))
3306 y = use_anchored_address (force_const_mem (mode, y));
3307 ypart = operand_subword (y, i, 1, mode);
3309 else if (ypart == 0)
3310 ypart = operand_subword_force (y, i, mode);
3312 gcc_assert (xpart && ypart);
3314 need_clobber |= (GET_CODE (xpart) == SUBREG);
3316 last_insn = emit_move_insn (xpart, ypart);
3322 /* Show the output dies here. This is necessary for SUBREGs
3323 of pseudos since we cannot track their lifetimes correctly;
3324 hard regs shouldn't appear here except as return values.
3325 We never want to emit such a clobber after reload. */
3327 && ! (reload_in_progress || reload_completed)
3328 && need_clobber != 0)
3336 /* Low level part of emit_move_insn.
3337 Called just like emit_move_insn, but assumes X and Y
3338 are basically valid. */
3341 emit_move_insn_1 (rtx x, rtx y)
3343 enum machine_mode mode = GET_MODE (x);
3344 enum insn_code code;
3346 gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3348 code = optab_handler (mov_optab, mode)->insn_code;
3349 if (code != CODE_FOR_nothing)
3350 return emit_insn (GEN_FCN (code) (x, y));
3352 /* Expand complex moves by moving real part and imag part. */
3353 if (COMPLEX_MODE_P (mode))
3354 return emit_move_complex (mode, x, y);
3356 if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3357 || ALL_FIXED_POINT_MODE_P (mode))
3359 rtx result = emit_move_via_integer (mode, x, y, true);
3361 /* If we can't find an integer mode, use multi words. */
3365 return emit_move_multi_word (mode, x, y);
3368 if (GET_MODE_CLASS (mode) == MODE_CC)
3369 return emit_move_ccmode (mode, x, y);
3371 /* Try using a move pattern for the corresponding integer mode. This is
3372 only safe when simplify_subreg can convert MODE constants into integer
3373 constants. At present, it can only do this reliably if the value
3374 fits within a HOST_WIDE_INT. */
3375 if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3377 rtx ret = emit_move_via_integer (mode, x, y, false);
3382 return emit_move_multi_word (mode, x, y);
3385 /* Generate code to copy Y into X.
3386 Both Y and X must have the same mode, except that
3387 Y can be a constant with VOIDmode.
3388 This mode cannot be BLKmode; use emit_block_move for that.
3390 Return the last instruction emitted. */
3393 emit_move_insn (rtx x, rtx y)
3395 enum machine_mode mode = GET_MODE (x);
3396 rtx y_cst = NULL_RTX;
3399 gcc_assert (mode != BLKmode
3400 && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3405 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3406 && (last_insn = compress_float_constant (x, y)))
3411 if (!LEGITIMATE_CONSTANT_P (y))
3413 y = force_const_mem (mode, y);
3415 /* If the target's cannot_force_const_mem prevented the spill,
3416 assume that the target's move expanders will also take care
3417 of the non-legitimate constant. */
3421 y = use_anchored_address (y);
3425 /* If X or Y are memory references, verify that their addresses are valid
3426 for the machine. */
3428 && (! memory_address_p (GET_MODE (x), XEXP (x, 0))
3429 && ! push_operand (x, GET_MODE (x))))
3430 x = validize_mem (x);
3433 && ! memory_address_p (GET_MODE (y), XEXP (y, 0)))
3434 y = validize_mem (y);
3436 gcc_assert (mode != BLKmode);
3438 last_insn = emit_move_insn_1 (x, y);
3440 if (y_cst && REG_P (x)
3441 && (set = single_set (last_insn)) != NULL_RTX
3442 && SET_DEST (set) == x
3443 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3444 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3449 /* If Y is representable exactly in a narrower mode, and the target can
3450 perform the extension directly from constant or memory, then emit the
3451 move as an extension. */
3454 compress_float_constant (rtx x, rtx y)
3456 enum machine_mode dstmode = GET_MODE (x);
3457 enum machine_mode orig_srcmode = GET_MODE (y);
3458 enum machine_mode srcmode;
3460 int oldcost, newcost;
3462 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3464 if (LEGITIMATE_CONSTANT_P (y))
3465 oldcost = rtx_cost (y, SET);
3467 oldcost = rtx_cost (force_const_mem (dstmode, y), SET);
3469 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3470 srcmode != orig_srcmode;
3471 srcmode = GET_MODE_WIDER_MODE (srcmode))
3474 rtx trunc_y, last_insn;
3476 /* Skip if the target can't extend this way. */
3477 ic = can_extend_p (dstmode, srcmode, 0);
3478 if (ic == CODE_FOR_nothing)
3481 /* Skip if the narrowed value isn't exact. */
3482 if (! exact_real_truncate (srcmode, &r))
3485 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3487 if (LEGITIMATE_CONSTANT_P (trunc_y))
3489 /* Skip if the target needs extra instructions to perform
3490 the extension. */
3491 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3493 /* This is valid, but may not be cheaper than the original. */
3494 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3495 if (oldcost < newcost)
3498 else if (float_extend_from_mem[dstmode][srcmode])
3500 trunc_y = force_const_mem (srcmode, trunc_y);
3501 /* This is valid, but may not be cheaper than the original. */
3502 newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET);
3503 if (oldcost < newcost)
3505 trunc_y = validize_mem (trunc_y);
3510 /* For CSE's benefit, force the compressed constant pool entry
3511 into a new pseudo. This constant may be used in different modes,
3512 and if not, combine will put things back together for us. */
3513 trunc_y = force_reg (srcmode, trunc_y);
3514 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3515 last_insn = get_last_insn ();
3518 set_unique_reg_note (last_insn, REG_EQUAL, y);
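/* Worked example, illustrative only: moving the DFmode constant 1.0
   into a register on a target with an extendsfdf2 pattern.  1.0
   truncates to SFmode exactly, so when the SFmode constant plus the
   extension is cheaper than loading the DFmode constant-pool entry,
   the move is emitted as a float_extend of the narrower constant.  */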
3526 /* Pushing data onto the stack. */
3528 /* Push a block of length SIZE (perhaps variable)
3529 and return an rtx to address the beginning of the block.
3530 The value may be virtual_outgoing_args_rtx.
3532 EXTRA is the number of bytes of padding to push in addition to SIZE.
3533 BELOW nonzero means this padding comes at low addresses;
3534 otherwise, the padding comes at high addresses. */
3537 push_block (rtx size, int extra, int below)
3541 size = convert_modes (Pmode, ptr_mode, size, 1);
3542 if (CONSTANT_P (size))
3543 anti_adjust_stack (plus_constant (size, extra));
3544 else if (REG_P (size) && extra == 0)
3545 anti_adjust_stack (size);
3548 temp = copy_to_mode_reg (Pmode, size);
3550 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3551 temp, 0, OPTAB_LIB_WIDEN);
3552 anti_adjust_stack (temp);
3555 #ifndef STACK_GROWS_DOWNWARD
3561 temp = virtual_outgoing_args_rtx;
3562 if (extra != 0 && below)
3563 temp = plus_constant (temp, extra);
3567 if (GET_CODE (size) == CONST_INT)
3568 temp = plus_constant (virtual_outgoing_args_rtx,
3569 -INTVAL (size) - (below ? 0 : extra));
3570 else if (extra != 0 && !below)
3571 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3572 negate_rtx (Pmode, plus_constant (size, extra)));
3574 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3575 negate_rtx (Pmode, size));
3578 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3581 #ifdef PUSH_ROUNDING
3583 /* Emit single push insn. */
3586 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3589 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3591 enum insn_code icode;
3592 insn_operand_predicate_fn pred;
3594 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3595 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3596 a MEM representing the push operation to the move expander. */
3597 icode = optab_handler (push_optab, mode)->insn_code;
3598 if (icode != CODE_FOR_nothing)
3600 if (((pred = insn_data[(int) icode].operand[0].predicate)
3601 && !((*pred) (x, mode))))
3602 x = force_reg (mode, x);
3603 emit_insn (GEN_FCN (icode) (x));
3606 if (GET_MODE_SIZE (mode) == rounded_size)
3607 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3608 /* If we are to pad downward, adjust the stack pointer first and
3609 then store X into the stack location using an offset. This is
3610 because emit_move_insn does not know how to pad; it does not have
3611 access to type. */
3612 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3614 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3615 HOST_WIDE_INT offset;
3617 emit_move_insn (stack_pointer_rtx,
3618 expand_binop (Pmode,
3619 #ifdef STACK_GROWS_DOWNWARD
3625 GEN_INT (rounded_size),
3626 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3628 offset = (HOST_WIDE_INT) padding_size;
3629 #ifdef STACK_GROWS_DOWNWARD
3630 if (STACK_PUSH_CODE == POST_DEC)
3631 /* We have already decremented the stack pointer, so get the
3632 previous value. */
3633 offset += (HOST_WIDE_INT) rounded_size;
3635 if (STACK_PUSH_CODE == POST_INC)
3636 /* We have already incremented the stack pointer, so get the
3637 previous value. */
3638 offset -= (HOST_WIDE_INT) rounded_size;
3640 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3644 #ifdef STACK_GROWS_DOWNWARD
3645 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3646 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3647 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3649 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3650 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3651 GEN_INT (rounded_size));
3653 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3656 dest = gen_rtx_MEM (mode, dest_addr);
3660 set_mem_attributes (dest, type, 1);
3662 if (flag_optimize_sibling_calls)
3663 /* Function incoming arguments may overlap with sibling call
3664 outgoing arguments and we cannot allow reordering of reads
3665 from function arguments with stores to outgoing arguments
3666 of sibling calls. */
3667 set_mem_alias_set (dest, 0);
3669 emit_move_insn (dest, x);
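/* Worked example, illustrative only: pushing an HImode value when
   PUSH_ROUNDING rounds 2 bytes up to 4 and the argument pads downward
   on a downward-growing stack.  The code above first drops the stack
   pointer by 4, then stores at offset padding_size == 2, adding
   rounded_size when a POST_DEC push already moved the pointer.  */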
3673 /* Generate code to push X onto the stack, assuming it has mode MODE and
3674 type TYPE.
3675 MODE is redundant except when X is a CONST_INT (since they don't
3676 carry mode info).
3677 SIZE is an rtx for the size of data to be copied (in bytes),
3678 needed only if X is BLKmode.
3680 ALIGN (in bits) is maximum alignment we can assume.
3682 If PARTIAL and REG are both nonzero, then copy that many of the first
3683 bytes of X into registers starting with REG, and push the rest of X.
3684 The amount of space pushed is decreased by PARTIAL bytes.
3685 REG must be a hard register in this case.
3686 If REG is zero but PARTIAL is not, take all other actions for an
3687 argument partially in registers, but do not actually load any
3688 registers.
3690 EXTRA is the amount in bytes of extra space to leave next to this arg.
3691 This is ignored if an argument block has already been allocated.
3693 On a machine that lacks real push insns, ARGS_ADDR is the address of
3694 the bottom of the argument block for this call. We use indexing off there
3695 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3696 argument block has not been preallocated.
3698 ARGS_SO_FAR is the size of args previously pushed for this call.
3700 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3701 for arguments passed in registers. If nonzero, it will be the number
3702 of bytes required. */
3705 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3706 unsigned int align, int partial, rtx reg, int extra,
3707 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3711 enum direction stack_direction
3712 #ifdef STACK_GROWS_DOWNWARD
3718 /* Decide where to pad the argument: `downward' for below,
3719 `upward' for above, or `none' for don't pad it.
3720 Default is below for small data on big-endian machines; else above. */
3721 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3723 /* Invert direction if stack is post-decrement.
3724 FIXME: why? */
3725 if (STACK_PUSH_CODE == POST_DEC)
3726 if (where_pad != none)
3727 where_pad = (where_pad == downward ? upward : downward);
3732 || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)))
3734 /* Copy a block into the stack, entirely or partially. */
3741 offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3742 used = partial - offset;
3744 if (mode != BLKmode)
3746 /* A value is to be stored in an insufficiently aligned
3747 stack slot; copy via a suitably aligned slot if
3748 necessary. */
3749 size = GEN_INT (GET_MODE_SIZE (mode));
3750 if (!MEM_P (xinner))
3752 temp = assign_temp (type, 0, 1, 1);
3753 emit_move_insn (temp, xinner);
3760 /* USED is now the # of bytes we need not copy to the stack
3761 because registers will take care of them. */
3764 xinner = adjust_address (xinner, BLKmode, used);
3766 /* If the partial register-part of the arg counts in its stack size,
3767 skip the part of stack space corresponding to the registers.
3768 Otherwise, start copying to the beginning of the stack space,
3769 by setting SKIP to 0. */
3770 skip = (reg_parm_stack_space == 0) ? 0 : used;
3772 #ifdef PUSH_ROUNDING
3773 /* Do it with several push insns if that doesn't take lots of insns
3774 and if there is no difficulty with push insns that skip bytes
3775 on the stack for alignment purposes. */
3778 && GET_CODE (size) == CONST_INT
3780 && MEM_ALIGN (xinner) >= align
3781 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3782 /* Here we avoid the case of a structure whose weak alignment
3783 forces many pushes of a small amount of data,
3784 and such small pushes do rounding that causes trouble. */
3785 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3786 || align >= BIGGEST_ALIGNMENT
3787 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3788 == (align / BITS_PER_UNIT)))
3789 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3791 /* Push padding now if padding above and stack grows down,
3792 or if padding below and stack grows up.
3793 But if space already allocated, this has already been done. */
3794 if (extra && args_addr == 0
3795 && where_pad != none && where_pad != stack_direction)
3796 anti_adjust_stack (GEN_INT (extra));
3798 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3801 #endif /* PUSH_ROUNDING */
3805 /* Otherwise make space on the stack and copy the data
3806 to the address of that space. */
3808 /* Deduct words put into registers from the size we must copy. */
3811 if (GET_CODE (size) == CONST_INT)
3812 size = GEN_INT (INTVAL (size) - used);
3814 size = expand_binop (GET_MODE (size), sub_optab, size,
3815 GEN_INT (used), NULL_RTX, 0,
3819 /* Get the address of the stack space.
3820 In this case, we do not deal with EXTRA separately.
3821 A single stack adjust will do. */
3824 temp = push_block (size, extra, where_pad == downward);
3827 else if (GET_CODE (args_so_far) == CONST_INT)
3828 temp = memory_address (BLKmode,
3829 plus_constant (args_addr,
3830 skip + INTVAL (args_so_far)));
3832 temp = memory_address (BLKmode,
3833 plus_constant (gen_rtx_PLUS (Pmode,
3838 if (!ACCUMULATE_OUTGOING_ARGS)
3840 /* If the source is referenced relative to the stack pointer,
3841 copy it to another register to stabilize it. We do not need
3842 to do this if we know that we won't be changing sp. */
3844 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3845 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3846 temp = copy_to_reg (temp);
3849 target = gen_rtx_MEM (BLKmode, temp);
3851 /* We do *not* set_mem_attributes here, because incoming arguments
3852 may overlap with sibling call outgoing arguments and we cannot
3853 allow reordering of reads from function arguments with stores
3854 to outgoing arguments of sibling calls. We do, however, want
3855 to record the alignment of the stack slot. */
3856 /* ALIGN may well be better aligned than TYPE, e.g. due to
3857 PARM_BOUNDARY. Assume the caller isn't lying. */
3858 set_mem_align (target, align);
3860 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3863 else if (partial > 0)
3865 /* Scalar partly in registers. */
3867 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3870 /* # bytes of start of argument
3871 that we must make space for but need not store. */
3872 int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3873 int args_offset = INTVAL (args_so_far);
3876 /* Push padding now if padding above and stack grows down,
3877 or if padding below and stack grows up.
3878 But if space already allocated, this has already been done. */
3879 if (extra && args_addr == 0
3880 && where_pad != none && where_pad != stack_direction)
3881 anti_adjust_stack (GEN_INT (extra));
3883 /* If we make space by pushing it, we might as well push
3884 the real data. Otherwise, we can leave OFFSET nonzero
3885 and leave the space uninitialized. */
3889 /* Now NOT_STACK gets the number of words that we don't need to
3890 allocate on the stack. Convert OFFSET to words too. */
3891 not_stack = (partial - offset) / UNITS_PER_WORD;
3892 offset /= UNITS_PER_WORD;
3894 /* If the partial register-part of the arg counts in its stack size,
3895 skip the part of stack space corresponding to the registers.
3896 Otherwise, start copying to the beginning of the stack space,
3897 by setting SKIP to 0. */
3898 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3900 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3901 x = validize_mem (force_const_mem (mode, x));
3903 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3904 SUBREGs of such registers are not allowed. */
3905 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3906 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3907 x = copy_to_reg (x);
3909 /* Loop over all the words allocated on the stack for this arg. */
3910 /* We can do it by words, because any scalar bigger than a word
3911 has a size a multiple of a word. */
3912 #ifndef PUSH_ARGS_REVERSED
3913 for (i = not_stack; i < size; i++)
3915 for (i = size - 1; i >= not_stack; i--)
3917 if (i >= not_stack + offset)
3918 emit_push_insn (operand_subword_force (x, i, mode),
3919 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3921 GEN_INT (args_offset + ((i - not_stack + skip)
3923 reg_parm_stack_space, alignment_pad);
3930 /* Push padding now if padding above and stack grows down,
3931 or if padding below and stack grows up.
3932 But if space already allocated, this has already been done. */
3933 if (extra && args_addr == 0
3934 && where_pad != none && where_pad != stack_direction)
3935 anti_adjust_stack (GEN_INT (extra));
3937 #ifdef PUSH_ROUNDING
3938 if (args_addr == 0 && PUSH_ARGS)
3939 emit_single_push_insn (mode, x, type);
3943 if (GET_CODE (args_so_far) == CONST_INT)
3945 = memory_address (mode,
3946 plus_constant (args_addr,
3947 INTVAL (args_so_far)));
3949 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3951 dest = gen_rtx_MEM (mode, addr);
3953 /* We do *not* set_mem_attributes here, because incoming arguments
3954 may overlap with sibling call outgoing arguments and we cannot
3955 allow reordering of reads from function arguments with stores
3956 to outgoing arguments of sibling calls. We do, however, want
3957 to record the alignment of the stack slot. */
3958 /* ALIGN may well be better aligned than TYPE, e.g. due to
3959 PARM_BOUNDARY. Assume the caller isn't lying. */
3960 set_mem_align (dest, align);
3962 emit_move_insn (dest, x);
3966 /* If part should go in registers, copy that part
3967 into the appropriate registers. Do this now, at the end,
3968 since mem-to-mem copies above may do function calls. */
3969 if (partial > 0 && reg != 0)
3971 /* Handle calls that pass values in multiple non-contiguous locations.
3972 The Irix 6 ABI has examples of this. */
3973 if (GET_CODE (reg) == PARALLEL)
3974 emit_group_load (reg, x, type, -1);
3977 gcc_assert (partial % UNITS_PER_WORD == 0);
3978 move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
3982 if (extra && args_addr == 0 && where_pad == stack_direction)
3983 anti_adjust_stack (GEN_INT (extra));
3985 if (alignment_pad && args_addr == 0)
3986 anti_adjust_stack (alignment_pad);
3989 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3990 operations. */
3993 get_subtarget (rtx x)
3997 /* Only registers can be subtargets. */
3999 /* Don't use hard regs to avoid extending their life. */
4000 || REGNO (x) < FIRST_PSEUDO_REGISTER
4004 /* A subroutine of expand_assignment. Optimize FIELD op= VAL, where
4005 FIELD is a bitfield. Returns true if the optimization was successful,
4006 and there's nothing else to do. */
4009 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4010 unsigned HOST_WIDE_INT bitpos,
4011 enum machine_mode mode1, rtx str_rtx,
4014 enum machine_mode str_mode = GET_MODE (str_rtx);
4015 unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4020 if (mode1 != VOIDmode
4021 || bitsize >= BITS_PER_WORD
4022 || str_bitsize > BITS_PER_WORD
4023 || TREE_SIDE_EFFECTS (to)
4024 || TREE_THIS_VOLATILE (to))
4028 if (!BINARY_CLASS_P (src)
4029 || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4032 op0 = TREE_OPERAND (src, 0);
4033 op1 = TREE_OPERAND (src, 1);
4036 if (!operand_equal_p (to, op0, 0))
4039 if (MEM_P (str_rtx))
4041 unsigned HOST_WIDE_INT offset1;
4043 if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4044 str_mode = word_mode;
4045 str_mode = get_best_mode (bitsize, bitpos,
4046 MEM_ALIGN (str_rtx), str_mode, 0);
4047 if (str_mode == VOIDmode)
4049 str_bitsize = GET_MODE_BITSIZE (str_mode);
4052 bitpos %= str_bitsize;
4053 offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4054 str_rtx = adjust_address (str_rtx, str_mode, offset1);
4056 else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4059 /* If the bit field covers the whole REG/MEM, store_field
4060 will likely generate better code. */
4061 if (bitsize >= str_bitsize)
4064 /* We can't handle fields split across multiple entities. */
4065 if (bitpos + bitsize > str_bitsize)
4068 if (BYTES_BIG_ENDIAN)
4069 bitpos = str_bitsize - bitpos - bitsize;
4071 switch (TREE_CODE (src))
4075 /* For now, just optimize the case of the topmost bitfield
4076 where we don't need to do any masking and also
4077 1 bit bitfields where xor can be used.
4078 We might win by one instruction for the other bitfields
4079 too if insv/extv instructions aren't used, so that
4080 can be added later. */
4081 if (bitpos + bitsize != str_bitsize
4082 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4085 value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4086 value = convert_modes (str_mode,
4087 TYPE_MODE (TREE_TYPE (op1)), value,
4088 TYPE_UNSIGNED (TREE_TYPE (op1)));
4090 /* We may be accessing data outside the field, which means
4091 we can alias adjacent data. */
4092 if (MEM_P (str_rtx))
4094 str_rtx = shallow_copy_rtx (str_rtx);
4095 set_mem_alias_set (str_rtx, 0);
4096 set_mem_expr (str_rtx, 0);
4099 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4100 if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4102 value = expand_and (str_mode, value, const1_rtx, NULL);
4105 value = expand_shift (LSHIFT_EXPR, str_mode, value,
4106 build_int_cst (NULL_TREE, bitpos),
4108 result = expand_binop (str_mode, binop, str_rtx,
4109 value, str_rtx, 1, OPTAB_WIDEN);
4110 if (result != str_rtx)
4111 emit_move_insn (str_rtx, result);
4116 if (TREE_CODE (op1) != INTEGER_CST)
4118 value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4119 value = convert_modes (GET_MODE (str_rtx),
4120 TYPE_MODE (TREE_TYPE (op1)), value,
4121 TYPE_UNSIGNED (TREE_TYPE (op1)));
4123 /* We may be accessing data outside the field, which means
4124 we can alias adjacent data. */
4125 if (MEM_P (str_rtx))
4127 str_rtx = shallow_copy_rtx (str_rtx);
4128 set_mem_alias_set (str_rtx, 0);
4129 set_mem_expr (str_rtx, 0);
4132 binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4133 if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4135 rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4137 value = expand_and (GET_MODE (str_rtx), value, mask,
4140 value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4141 build_int_cst (NULL_TREE, bitpos),
4143 result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4144 value, str_rtx, 1, OPTAB_WIDEN);
4145 if (result != str_rtx)
4146 emit_move_insn (str_rtx, result);
4157 /* Expand an assignment that stores the value of FROM into TO. If NONTEMPORAL
4158 is true, try generating a nontemporal store. */
4161 expand_assignment (tree to, tree from, bool nontemporal)
4166 /* Don't crash if the lhs of the assignment was erroneous. */
4167 if (TREE_CODE (to) == ERROR_MARK)
4169 result = expand_normal (from);
4173 /* Optimize away no-op moves without side-effects. */
4174 if (operand_equal_p (to, from, 0))
4177 /* Assignment of a structure component needs special treatment
4178 if the structure component's rtx is not simply a MEM.
4179 Assignment of an array element at a constant index, and assignment of
4180 an array element in an unaligned packed structure field, has the same
4181 problem. */
4182 if (handled_component_p (to)
4183 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4185 enum machine_mode mode1;
4186 HOST_WIDE_INT bitsize, bitpos;
4193 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4194 &unsignedp, &volatilep, true);
4196 /* If we are going to use store_bit_field and extract_bit_field,
4197 make sure to_rtx will be safe for multiple use. */
4199 to_rtx = expand_normal (tem);
4205 if (!MEM_P (to_rtx))
4207 /* We can get constant negative offsets into arrays with broken
4208 user code. Translate this to a trap instead of ICEing. */
4209 gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4210 expand_builtin_trap ();
4211 to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4214 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4215 #ifdef POINTERS_EXTEND_UNSIGNED
4216 if (GET_MODE (offset_rtx) != Pmode)
4217 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4219 if (GET_MODE (offset_rtx) != ptr_mode)
4220 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4223 /* A constant address in TO_RTX can have VOIDmode; we must not try
4224 to call force_reg for that case, so avoid it. */
4226 && GET_MODE (to_rtx) == BLKmode
4227 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4229 && (bitpos % bitsize) == 0
4230 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4231 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4233 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4237 to_rtx = offset_address (to_rtx, offset_rtx,
4238 highest_pow2_factor_for_target (to,
4242 /* Handle expand_expr of a complex value returning a CONCAT. */
4243 if (GET_CODE (to_rtx) == CONCAT)
4245 if (TREE_CODE (TREE_TYPE (from)) == COMPLEX_TYPE)
4247 gcc_assert (bitpos == 0);
4248 result = store_expr (from, to_rtx, false, nontemporal);
4252 gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4253 result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4261 /* If the field is at offset zero, we could have been given the
4262 DECL_RTX of the parent struct. Don't munge it. */
4263 to_rtx = shallow_copy_rtx (to_rtx);
4265 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4267 /* Deal with volatile and readonly fields. The former is only
4268 done for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4270 MEM_VOLATILE_P (to_rtx) = 1;
4271 if (component_uses_parent_alias_set (to))
4272 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4275 if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4279 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4280 TREE_TYPE (tem), get_alias_set (to),
4285 preserve_temp_slots (result);
4291 /* If the rhs is a function call and its value is not an aggregate,
4292 call the function before we start to compute the lhs.
4293 This is needed for correct code for cases such as
4294 val = setjmp (buf) on machines where reference to val
4295 requires loading up part of an address in a separate insn.
4297 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4298 since it might be a promoted variable where the zero- or sign-extension
4299 needs to be done. Handling this in the normal way is safe because no
4300 computation is done before the call. */
4301 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4302 && COMPLETE_TYPE_P (TREE_TYPE (from))
4303 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4304 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4305 && REG_P (DECL_RTL (to))))
4310 value = expand_normal (from);
4312 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4314 /* Handle calls that return values in multiple non-contiguous locations.
4315 The Irix 6 ABI has examples of this. */
4316 if (GET_CODE (to_rtx) == PARALLEL)
4317 emit_group_load (to_rtx, value, TREE_TYPE (from),
4318 int_size_in_bytes (TREE_TYPE (from)));
4319 else if (GET_MODE (to_rtx) == BLKmode)
4320 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4323 if (POINTER_TYPE_P (TREE_TYPE (to)))
4324 value = convert_memory_address (GET_MODE (to_rtx), value);
4325 emit_move_insn (to_rtx, value);
4327 preserve_temp_slots (to_rtx);
4333 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4334 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4337 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4339 /* Don't move directly into a return register. */
4340 if (TREE_CODE (to) == RESULT_DECL
4341 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4346 temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4348 if (GET_CODE (to_rtx) == PARALLEL)
4349 emit_group_load (to_rtx, temp, TREE_TYPE (from),
4350 int_size_in_bytes (TREE_TYPE (from)));
4352 emit_move_insn (to_rtx, temp);
4354 preserve_temp_slots (to_rtx);
4360 /* In case we are returning the contents of an object which overlaps
4361 the place the value is being stored, use a safe function when copying
4362 a value through a pointer into a structure value return block. */
4363 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4364 && cfun->returns_struct
4365 && !cfun->returns_pcc_struct)
4370 size = expr_size (from);
4371 from_rtx = expand_normal (from);
4373 emit_library_call (memmove_libfunc, LCT_NORMAL,
4374 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4375 XEXP (from_rtx, 0), Pmode,
4376 convert_to_mode (TYPE_MODE (sizetype),
4377 size, TYPE_UNSIGNED (sizetype)),
4378 TYPE_MODE (sizetype));
4380 preserve_temp_slots (to_rtx);
4386 /* Compute FROM and store the value in the rtx we got. */
4389 result = store_expr (from, to_rtx, 0, nontemporal);
4390 preserve_temp_slots (result);
4396 /* Emit a nontemporal store insn that moves FROM to TO. Return true if this
4397 succeeded, false otherwise. */
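/* A nontemporal store writes around the cache hierarchy (for example
   movnti on x86), which pays off for large streaming stores whose
   destination will not be read again soon.  Whether such an insn
   exists is a per-target question, answered below through
   storent_optab.  */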
4400 emit_storent_insn (rtx to, rtx from)
4402 enum machine_mode mode = GET_MODE (to), imode;
4403 enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4406 if (code == CODE_FOR_nothing)
4409 imode = insn_data[code].operand[0].mode;
4410 if (!insn_data[code].operand[0].predicate (to, imode))
4413 imode = insn_data[code].operand[1].mode;
4414 if (!insn_data[code].operand[1].predicate (from, imode))
4416 from = copy_to_mode_reg (imode, from);
4417 if (!insn_data[code].operand[1].predicate (from, imode))
4421 pattern = GEN_FCN (code) (to, from);
4422 if (pattern == NULL_RTX)
4425 emit_insn (pattern);
4429 /* Generate code for computing expression EXP,
4430 and storing the value into TARGET.
4432 If the mode is BLKmode then we may return TARGET itself.
4433 It turns out that in BLKmode it doesn't cause a problem,
4434 because C has no operators that could combine two different
4435 assignments into the same BLKmode object with different values
4436 with no sequence point. Will other languages need this to
4437 be more thorough?
4439 If CALL_PARAM_P is nonzero, this is a store into a call param on the
4440 stack, and block moves may need to be treated specially.
4442 If NONTEMPORAL is true, try using a nontemporal store instruction. */
4445 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4448 rtx alt_rtl = NULL_RTX;
4449 int dont_return_target = 0;
4451 if (VOID_TYPE_P (TREE_TYPE (exp)))
4453 /* C++ can generate ?: expressions with a throw expression in one
4454 branch and an rvalue in the other. Here, we resolve attempts to
4455 store the throw expression's nonexistent result. */
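/* A hypothetical C++ example of such a branch:
       x = ok ? y : throw std::runtime_error ("no value");
   The throw arm has void type, so there is no result to store; we
   only expand the expression for its side effects.  */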
4456 gcc_assert (!call_param_p);
4457 expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4460 if (TREE_CODE (exp) == COMPOUND_EXPR)
4462 /* Perform first part of compound expression, then assign from second
4463 part. */
4464 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4465 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4466 return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4469 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4471 /* For a conditional expression, get a safe form of the target. Then
4472 test the condition, doing the appropriate assignment on either
4473 side. This avoids the creation of unnecessary temporaries.
4474 For non-BLKmode, it is more efficient not to do this. */
4476 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4478 do_pending_stack_adjust ();
4480 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4481 store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4483 emit_jump_insn (gen_jump (lab2));
4486 store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4493 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4494 /* If this is a scalar in a register that is stored in a wider mode
4495 than the declared mode, compute the result into its declared mode
4496 and then convert to the wider mode. Our value is the computed
4497 expression. */
4499 rtx inner_target = 0;
4501 /* We can do the conversion inside EXP, which will often result
4502 in some optimizations. Do the conversion in two steps: first
4503 change the signedness, if needed, then do the extension. But don't
4504 do this if the type of EXP is a subtype of something else
4505 since then the conversion might involve more than just
4506 converting modes. */
4507 if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4508 && TREE_TYPE (TREE_TYPE (exp)) == 0
4509 && GET_MODE_PRECISION (GET_MODE (target))
4510 == TYPE_PRECISION (TREE_TYPE (exp)))
4512 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4513 != SUBREG_PROMOTED_UNSIGNED_P (target))
4515 /* Some types, e.g. Fortran's logical*4, won't have a signed
4516 version, so use the mode instead. */
4518 = (signed_or_unsigned_type_for
4519 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4521 ntype = lang_hooks.types.type_for_mode
4522 (TYPE_MODE (TREE_TYPE (exp)),
4523 SUBREG_PROMOTED_UNSIGNED_P (target));
4525 exp = fold_convert (ntype, exp);
4528 exp = fold_convert (lang_hooks.types.type_for_mode
4529 (GET_MODE (SUBREG_REG (target)),
4530 SUBREG_PROMOTED_UNSIGNED_P (target)),
4533 inner_target = SUBREG_REG (target);
4536 temp = expand_expr (exp, inner_target, VOIDmode,
4537 call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4539 /* If TEMP is a VOIDmode constant, use convert_modes to make
4540 sure that we properly convert it. */
4541 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4543 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4544 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4545 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4546 GET_MODE (target), temp,
4547 SUBREG_PROMOTED_UNSIGNED_P (target));
4550 convert_move (SUBREG_REG (target), temp,
4551 SUBREG_PROMOTED_UNSIGNED_P (target));
4555 else if (TREE_CODE (exp) == STRING_CST
4556 && !nontemporal && !call_param_p
4557 && TREE_STRING_LENGTH (exp) > 0
4558 && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4560 /* Optimize initialization of an array with a STRING_CST. */
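/* Illustrative case (hypothetical declaration): for
       char buf[16] = "abc";
   roughly the first four bytes (the string plus its NUL, possibly
   rounded up for store_by_pieces) are written directly, and
   clear_storage zeros whatever tail of the array remains.  */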
4561 HOST_WIDE_INT exp_len, str_copy_len;
4564 exp_len = int_expr_size (exp);
4568 str_copy_len = strlen (TREE_STRING_POINTER (exp));
4569 if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4572 str_copy_len = TREE_STRING_LENGTH (exp);
4573 if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4575 str_copy_len += STORE_MAX_PIECES - 1;
4576 str_copy_len &= ~(STORE_MAX_PIECES - 1);
4578 str_copy_len = MIN (str_copy_len, exp_len);
4579 if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4580 CONST_CAST (char *, TREE_STRING_POINTER (exp)),
4581 MEM_ALIGN (target), false))
4586 dest_mem = store_by_pieces (dest_mem,
4587 str_copy_len, builtin_strncpy_read_str,
4588 CONST_CAST (char *, TREE_STRING_POINTER (exp)),
4589 MEM_ALIGN (target), false,
4590 exp_len > str_copy_len ? 1 : 0);
4591 if (exp_len > str_copy_len)
4592 clear_storage (adjust_address (dest_mem, BLKmode, 0),
4593 GEN_INT (exp_len - str_copy_len),
4602 /* If we want to use a nontemporal store, force the value to
4603 register first. */
4604 tmp_target = nontemporal ? NULL_RTX : target;
4605 temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4607 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4609 /* Return TARGET if it's a specified hardware register.
4610 If TARGET is a volatile mem ref, either return TARGET
4611 or return a reg copied *from* TARGET; ANSI requires this.
4613 Otherwise, if TEMP is not TARGET, return TEMP
4614 if it is constant (for efficiency),
4615 or if we really want the correct value. */
4616 if (!(target && REG_P (target)
4617 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4618 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4619 && ! rtx_equal_p (temp, target)
4620 && CONSTANT_P (temp))
4621 dont_return_target = 1;
4624 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4625 the same as that of TARGET, adjust the constant. This is needed, for
4626 example, in case it is a CONST_DOUBLE and we want only a word-sized
4627 value. */
4628 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4629 && TREE_CODE (exp) != ERROR_MARK
4630 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4631 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4632 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4634 /* If value was not generated in the target, store it there.
4635 Convert the value to TARGET's type first if necessary and emit the
4636 pending incrementations that have been queued when expanding EXP.
4637 Note that we cannot emit the whole queue blindly because this will
4638 effectively disable the POST_INC optimization later.
4640 If TEMP and TARGET compare equal according to rtx_equal_p, but
4641 one or both of them are volatile memory refs, we have to distinguish
4642 two cases:
4643 - expand_expr has used TARGET. In this case, we must not generate
4644 another copy. This can be detected by TARGET being equal according
4645 to == .
4646 - expand_expr has not used TARGET - that means that the source just
4647 happens to have the same RTX form. Since temp will have been created
4648 by expand_expr, it will compare unequal according to == .
4649 We must generate a copy in this case, to reach the correct number
4650 of volatile memory references. */
4652 if ((! rtx_equal_p (temp, target)
4653 || (temp != target && (side_effects_p (temp)
4654 || side_effects_p (target))))
4655 && TREE_CODE (exp) != ERROR_MARK
4656 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4657 but TARGET is not valid memory reference, TEMP will differ
4658 from TARGET although it is really the same location. */
4659 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4660 /* If there's nothing to copy, don't bother. Don't call
4661 expr_size unless necessary, because some front-ends' (e.g. C++'s)
4662 expr_size hook must not be given objects that are not
4663 supposed to be bit-copied or bit-initialized. */
4664 && expr_size (exp) != const0_rtx)
4666 if (GET_MODE (temp) != GET_MODE (target)
4667 && GET_MODE (temp) != VOIDmode)
4669 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4670 if (dont_return_target)
4672 /* In this case, we will return TEMP,
4673 so make sure it has the proper mode.
4674 But don't forget to store the value into TARGET. */
4675 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4676 emit_move_insn (target, temp);
4678 else if (GET_MODE (target) == BLKmode
4679 || GET_MODE (temp) == BLKmode)
4680 emit_block_move (target, temp, expr_size (exp),
4682 ? BLOCK_OP_CALL_PARM
4683 : BLOCK_OP_NORMAL));
4685 convert_move (target, temp, unsignedp);
4688 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4690 /* Handle copying a string constant into an array. The string
4691 constant may be shorter than the array. So copy just the string's
4692 actual length, and clear the rest. First get the size of the data
4693 type of the string, which is actually the size of the target. */
4694 rtx size = expr_size (exp);
4696 if (GET_CODE (size) == CONST_INT
4697 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4698 emit_block_move (target, temp, size,
4700 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4703 /* Compute the size of the data to copy from the string. */
4705 = size_binop (MIN_EXPR,
4706 make_tree (sizetype, size),
4707 size_int (TREE_STRING_LENGTH (exp)));
4709 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4711 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4714 /* Copy that much. */
4715 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4716 TYPE_UNSIGNED (sizetype));
4717 emit_block_move (target, temp, copy_size_rtx,
4719 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4721 /* Figure out how much is left in TARGET that we have to clear.
4722 Do all calculations in ptr_mode. */
4723 if (GET_CODE (copy_size_rtx) == CONST_INT)
4725 size = plus_constant (size, -INTVAL (copy_size_rtx));
4726 target = adjust_address (target, BLKmode,
4727 INTVAL (copy_size_rtx));
4731 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4732 copy_size_rtx, NULL_RTX, 0,
4735 #ifdef POINTERS_EXTEND_UNSIGNED
4736 if (GET_MODE (copy_size_rtx) != Pmode)
4737 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4738 TYPE_UNSIGNED (sizetype));
4741 target = offset_address (target, copy_size_rtx,
4742 highest_pow2_factor (copy_size));
4743 label = gen_label_rtx ();
4744 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4745 GET_MODE (size), 0, label);
4748 if (size != const0_rtx)
4749 clear_storage (target, size, BLOCK_OP_NORMAL);
4755 /* Handle calls that return values in multiple non-contiguous locations.
4756 The Irix 6 ABI has examples of this. */
4757 else if (GET_CODE (target) == PARALLEL)
4758 emit_group_load (target, temp, TREE_TYPE (exp),
4759 int_size_in_bytes (TREE_TYPE (exp)));
4760 else if (GET_MODE (temp) == BLKmode)
4761 emit_block_move (target, temp, expr_size (exp),
4763 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4764 else if (nontemporal
4765 && emit_storent_insn (target, temp))
4766 /* If we managed to emit a nontemporal store, there is nothing else to
4767 do. */
4771 temp = force_operand (temp, target);
4773 emit_move_insn (target, temp);
4780 /* Helper for categorize_ctor_elements. Identical interface. */
4783 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4784 HOST_WIDE_INT *p_elt_count,
4787 unsigned HOST_WIDE_INT idx;
4788 HOST_WIDE_INT nz_elts, elt_count;
4789 tree value, purpose;
4791 /* Whether CTOR is a valid constant initializer, in accordance with what
4792 initializer_constant_valid_p does. If inferred from the constructor
4793 elements, true until proven otherwise. */
4794 bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4795 bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4800 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4805 if (TREE_CODE (purpose) == RANGE_EXPR)
4807 tree lo_index = TREE_OPERAND (purpose, 0);
4808 tree hi_index = TREE_OPERAND (purpose, 1);
4810 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4811 mult = (tree_low_cst (hi_index, 1)
4812 - tree_low_cst (lo_index, 1) + 1);
4815 switch (TREE_CODE (value))
4819 HOST_WIDE_INT nz = 0, ic = 0;
4822 = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
4824 nz_elts += mult * nz;
4825 elt_count += mult * ic;
4827 if (const_from_elts_p && const_p)
4828 const_p = const_elt_p;
4835 if (!initializer_zerop (value))
4841 nz_elts += mult * TREE_STRING_LENGTH (value);
4842 elt_count += mult * TREE_STRING_LENGTH (value);
4846 if (!initializer_zerop (TREE_REALPART (value)))
4848 if (!initializer_zerop (TREE_IMAGPART (value)))
4856 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4858 if (!initializer_zerop (TREE_VALUE (v)))
4869 if (const_from_elts_p && const_p)
4870 const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
4877 && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
4878 || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
4881 bool clear_this = true;
4883 if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
4885 /* We don't expect more than one element of the union to be
4886 initialized. Not sure what we should do otherwise... */
4887 gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
4890 init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
4891 CONSTRUCTOR_ELTS (ctor),
4894 /* ??? We could look at each element of the union, and find the
4895 largest element, which would avoid comparing the size of the
4896 initialized element against any tail padding in the union.
4897 Doesn't seem worth the effort... */
4898 if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
4899 TYPE_SIZE (init_sub_type)) == 1)
4901 /* And now we have to find out if the element itself is fully
4902 constructed. E.g. for union { struct { int a, b; } s; } u
4903 = { .s = { .a = 1 } }. */
4904 if (elt_count == count_type_elements (init_sub_type, false))
4909 *p_must_clear = clear_this;
4912 *p_nz_elts += nz_elts;
4913 *p_elt_count += elt_count;
4918 /* Examine CTOR to discover:
4919 * how many scalar fields are set to nonzero values,
4920 and place it in *P_NZ_ELTS;
4921 * how many scalar fields in total are in CTOR,
4922 and place it in *P_ELT_COUNT.
4923 * if a type is a union, and the initializer from the constructor
4924 is not the largest element in the union, then set *p_must_clear.
4926 Return whether or not CTOR is a valid static constant initializer, the same
4927 as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0". */
4930 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4931 HOST_WIDE_INT *p_elt_count,
4936 *p_must_clear = false;
4939 categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
4942 /* Count the number of scalars in TYPE. Return -1 on overflow or
4943 if TYPE is variable-sized. If ALLOW_FLEXARR is true, don't count a flexible
4944 array member at the end of the structure. */
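/* For instance, a type such as
       struct { int x; int y[3]; }
   counts as 4 scalars (X plus the three elements of Y), whereas a
   variable-length array type yields -1.  */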
4947 count_type_elements (const_tree type, bool allow_flexarr)
4949 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4950 switch (TREE_CODE (type))
4954 tree telts = array_type_nelts (type);
4955 if (telts && host_integerp (telts, 1))
4957 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4958 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
4961 else if (max / n > m)
4969 HOST_WIDE_INT n = 0, t;
4972 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4973 if (TREE_CODE (f) == FIELD_DECL)
4975 t = count_type_elements (TREE_TYPE (f), false);
4978 /* Check for structures with flexible array member. */
4979 tree tf = TREE_TYPE (f);
4981 && TREE_CHAIN (f) == NULL
4982 && TREE_CODE (tf) == ARRAY_TYPE
4984 && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
4985 && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
4986 && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
4987 && int_size_in_bytes (type) >= 0)
4999 case QUAL_UNION_TYPE:
5006 return TYPE_VECTOR_SUBPARTS (type);
5010 case FIXED_POINT_TYPE:
5015 case REFERENCE_TYPE:
5027 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
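/* E.g. an int[8] constructor with a single nonzero element qualifies
   (1 < 8/4), while an int[4] constructor with one nonzero element
   does not (1 < 4/4 fails).  */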
5030 mostly_zeros_p (const_tree exp)
5032 if (TREE_CODE (exp) == CONSTRUCTOR)
5035 HOST_WIDE_INT nz_elts, count, elts;
5038 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5042 elts = count_type_elements (TREE_TYPE (exp), false);
5044 return nz_elts < elts / 4;
5047 return initializer_zerop (exp);
5050 /* Return 1 if EXP contains all zeros. */
5053 all_zeros_p (const_tree exp)
5055 if (TREE_CODE (exp) == CONSTRUCTOR)
5058 HOST_WIDE_INT nz_elts, count;
5061 categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5062 return nz_elts == 0;
5065 return initializer_zerop (exp);
5068 /* Helper function for store_constructor.
5069 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5070 TYPE is the type of the CONSTRUCTOR, not the element type.
5071 CLEARED is as for store_constructor.
5072 ALIAS_SET is the alias set to use for any stores.
5074 This provides a recursive shortcut back to store_constructor when it isn't
5075 necessary to go through store_field. This is so that we can pass through
5076 the cleared field to let store_constructor know that we may not have to
5077 clear a substructure if the outer structure has already been cleared. */
5080 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5081 HOST_WIDE_INT bitpos, enum machine_mode mode,
5082 tree exp, tree type, int cleared,
5083 alias_set_type alias_set)
5085 if (TREE_CODE (exp) == CONSTRUCTOR
5086 /* We can only call store_constructor recursively if the size and
5087 bit position are on a byte boundary. */
5088 && bitpos % BITS_PER_UNIT == 0
5089 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5090 /* If we have a nonzero bitpos for a register target, then we just
5091 let store_field do the bitfield handling. This is unlikely to
5092 generate unnecessary clear instructions anyway. */
5093 && (bitpos == 0 || MEM_P (target)))
5097 = adjust_address (target,
5098 GET_MODE (target) == BLKmode
5100 % GET_MODE_ALIGNMENT (GET_MODE (target)))
5101 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5104 /* Update the alias set, if required. */
5105 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5106 && MEM_ALIAS_SET (target) != 0)
5108 target = copy_rtx (target);
5109 set_mem_alias_set (target, alias_set);
5112 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5115 store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5118 /* Store the value of constructor EXP into the rtx TARGET.
5119 TARGET is either a REG or a MEM; we know it cannot conflict, since
5120 safe_from_p has been called.
5121 CLEARED is true if TARGET is known to have been zero'd.
5122 SIZE is the number of bytes of TARGET we are allowed to modify: this
5123 may not be the same as the size of EXP if we are assigning to a field
5124 which has been packed to exclude padding bits. */
5127 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5129 tree type = TREE_TYPE (exp);
5130 #ifdef WORD_REGISTER_OPERATIONS
5131 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5134 switch (TREE_CODE (type))
5138 case QUAL_UNION_TYPE:
5140 unsigned HOST_WIDE_INT idx;
5143 /* If size is zero or the target is already cleared, do nothing. */
5144 if (size == 0 || cleared)
5146 /* We either clear the aggregate or indicate the value is dead. */
5147 else if ((TREE_CODE (type) == UNION_TYPE
5148 || TREE_CODE (type) == QUAL_UNION_TYPE)
5149 && ! CONSTRUCTOR_ELTS (exp))
5150 /* If the constructor is empty, clear the union. */
5152 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5156 /* If we are building a static constructor into a register,
5157 set the initial value as zero so we can fold the value into
5158 a constant. But if more than one register is involved,
5159 this probably loses. */
5160 else if (REG_P (target) && TREE_STATIC (exp)
5161 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5163 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5167 /* If the constructor has fewer fields than the structure or
5168 if we are initializing the structure to mostly zeros, clear
5169 the whole structure first. Don't do this if TARGET is a
5170 register whose mode size isn't equal to SIZE since
5171 clear_storage can't handle this case. */
5173 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5174 != fields_length (type))
5175 || mostly_zeros_p (exp))
5177 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5180 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5184 if (REG_P (target) && !cleared)
5185 emit_clobber (target);
5187 /* Store each element of the constructor into the
5188 corresponding field of TARGET. */
5189 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5191 enum machine_mode mode;
5192 HOST_WIDE_INT bitsize;
5193 HOST_WIDE_INT bitpos = 0;
5195 rtx to_rtx = target;
5197 /* Just ignore missing fields. We cleared the whole
5198 structure, above, if any fields are missing. */
5202 if (cleared && initializer_zerop (value))
5205 if (host_integerp (DECL_SIZE (field), 1))
5206 bitsize = tree_low_cst (DECL_SIZE (field), 1);
5210 mode = DECL_MODE (field);
5211 if (DECL_BIT_FIELD (field))
5214 offset = DECL_FIELD_OFFSET (field);
5215 if (host_integerp (offset, 0)
5216 && host_integerp (bit_position (field), 0))
5218 bitpos = int_bit_position (field);
5222 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5229 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5230 make_tree (TREE_TYPE (exp),
5233 offset_rtx = expand_normal (offset);
5234 gcc_assert (MEM_P (to_rtx));
5236 #ifdef POINTERS_EXTEND_UNSIGNED
5237 if (GET_MODE (offset_rtx) != Pmode)
5238 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
5240 if (GET_MODE (offset_rtx) != ptr_mode)
5241 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5244 to_rtx = offset_address (to_rtx, offset_rtx,
5245 highest_pow2_factor (offset));
5248 #ifdef WORD_REGISTER_OPERATIONS
5249 /* If this initializes a field that is smaller than a
5250 word, at the start of a word, try to widen it to a full
5251 word. This special case allows us to output C++ member
5252 function initializations in a form that the optimizers
5253 can understand. */
5255 && bitsize < BITS_PER_WORD
5256 && bitpos % BITS_PER_WORD == 0
5257 && GET_MODE_CLASS (mode) == MODE_INT
5258 && TREE_CODE (value) == INTEGER_CST
5260 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5262 tree type = TREE_TYPE (value);
5264 if (TYPE_PRECISION (type) < BITS_PER_WORD)
5266 type = lang_hooks.types.type_for_size
5267 (BITS_PER_WORD, TYPE_UNSIGNED (type));
5268 value = fold_convert (type, value);
5271 if (BYTES_BIG_ENDIAN)
5273 = fold_build2 (LSHIFT_EXPR, type, value,
5274 build_int_cst (type,
5275 BITS_PER_WORD - bitsize));
5276 bitsize = BITS_PER_WORD;
5281 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5282 && DECL_NONADDRESSABLE_P (field))
5284 to_rtx = copy_rtx (to_rtx);
5285 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5288 store_constructor_field (to_rtx, bitsize, bitpos, mode,
5289 value, type, cleared,
5290 get_alias_set (TREE_TYPE (field)));
5297 unsigned HOST_WIDE_INT i;
5300 tree elttype = TREE_TYPE (type);
5302 HOST_WIDE_INT minelt = 0;
5303 HOST_WIDE_INT maxelt = 0;
5305 domain = TYPE_DOMAIN (type);
5306 const_bounds_p = (TYPE_MIN_VALUE (domain)
5307 && TYPE_MAX_VALUE (domain)
5308 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5309 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5311 /* If we have constant bounds for the range of the type, get them. */
5314 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5315 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5318 /* If the constructor has fewer elements than the array, clear
5319 the whole array first. Similarly if this is a static
5320 constructor of a non-BLKmode object. */
5323 else if (REG_P (target) && TREE_STATIC (exp))
5327 unsigned HOST_WIDE_INT idx;
5329 HOST_WIDE_INT count = 0, zero_count = 0;
5330 need_to_clear = ! const_bounds_p;
5332 /* This loop is a more accurate version of the loop in
5333 mostly_zeros_p (it handles RANGE_EXPR in an index). It
5334 is also needed to check for missing elements. */
5335 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5337 HOST_WIDE_INT this_node_count;
5342 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5344 tree lo_index = TREE_OPERAND (index, 0);
5345 tree hi_index = TREE_OPERAND (index, 1);
5347 if (! host_integerp (lo_index, 1)
5348 || ! host_integerp (hi_index, 1))
5354 this_node_count = (tree_low_cst (hi_index, 1)
5355 - tree_low_cst (lo_index, 1) + 1);
5358 this_node_count = 1;
5360 count += this_node_count;
5361 if (mostly_zeros_p (value))
5362 zero_count += this_node_count;
5365 /* Clear the entire array first if there are any missing
5366 elements, or if the incidence of zero elements is >=
5367 75%. */
5369 && (count < maxelt - minelt + 1
5370 || 4 * zero_count >= 3 * count))
5374 if (need_to_clear && size > 0)
5377 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5379 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5383 if (!cleared && REG_P (target))
5384 /* Inform later passes that the old value is dead. */
5385 emit_clobber (target);
5387 /* Store each element of the constructor into the
5388 corresponding element of TARGET, determined by counting the
5389 elements. */
5390 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5392 enum machine_mode mode;
5393 HOST_WIDE_INT bitsize;
5394 HOST_WIDE_INT bitpos;
5396 rtx xtarget = target;
5398 if (cleared && initializer_zerop (value))
5401 unsignedp = TYPE_UNSIGNED (elttype);
5402 mode = TYPE_MODE (elttype);
5403 if (mode == BLKmode)
5404 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5405 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5408 bitsize = GET_MODE_BITSIZE (mode);
5410 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5412 tree lo_index = TREE_OPERAND (index, 0);
5413 tree hi_index = TREE_OPERAND (index, 1);
5414 rtx index_r, pos_rtx;
5415 HOST_WIDE_INT lo, hi, count;
5418 /* If the range is constant and "small", unroll the loop. */
5420 && host_integerp (lo_index, 0)
5421 && host_integerp (hi_index, 0)
5422 && (lo = tree_low_cst (lo_index, 0),
5423 hi = tree_low_cst (hi_index, 0),
5424 count = hi - lo + 1,
5427 || (host_integerp (TYPE_SIZE (elttype), 1)
5428 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5431 lo -= minelt; hi -= minelt;
5432 for (; lo <= hi; lo++)
5434 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5437 && !MEM_KEEP_ALIAS_SET_P (target)
5438 && TREE_CODE (type) == ARRAY_TYPE
5439 && TYPE_NONALIASED_COMPONENT (type))
5441 target = copy_rtx (target);
5442 MEM_KEEP_ALIAS_SET_P (target) = 1;
5445 store_constructor_field
5446 (target, bitsize, bitpos, mode, value, type, cleared,
5447 get_alias_set (elttype));
5452 rtx loop_start = gen_label_rtx ();
5453 rtx loop_end = gen_label_rtx ();
5456 expand_normal (hi_index);
5457 unsignedp = TYPE_UNSIGNED (domain);
5459 index = build_decl (VAR_DECL, NULL_TREE, domain);
5462 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5464 SET_DECL_RTL (index, index_r);
5465 store_expr (lo_index, index_r, 0, false);
5467 /* Build the head of the loop. */
5468 do_pending_stack_adjust ();
5469 emit_label (loop_start);
5471 /* Assign value to element index. */
5473 fold_convert (ssizetype,
5474 fold_build2 (MINUS_EXPR,
5477 TYPE_MIN_VALUE (domain)));
5480 size_binop (MULT_EXPR, position,
5481 fold_convert (ssizetype,
5482 TYPE_SIZE_UNIT (elttype)));
5484 pos_rtx = expand_normal (position);
5485 xtarget = offset_address (target, pos_rtx,
5486 highest_pow2_factor (position));
5487 xtarget = adjust_address (xtarget, mode, 0);
5488 if (TREE_CODE (value) == CONSTRUCTOR)
5489 store_constructor (value, xtarget, cleared,
5490 bitsize / BITS_PER_UNIT);
5492 store_expr (value, xtarget, 0, false);
5494 /* Generate a conditional jump to exit the loop. */
5495 exit_cond = build2 (LT_EXPR, integer_type_node,
5497 jumpif (exit_cond, loop_end);
5499 /* Update the loop counter, and jump to the head of
5500 the loop. */
5501 expand_assignment (index,
5502 build2 (PLUS_EXPR, TREE_TYPE (index),
5503 index, integer_one_node),
5506 emit_jump (loop_start);
5508 /* Build the end of the loop. */
5509 emit_label (loop_end);
5512 else if ((index != 0 && ! host_integerp (index, 0))
5513 || ! host_integerp (TYPE_SIZE (elttype), 1))
5518 index = ssize_int (1);
5521 index = fold_convert (ssizetype,
5522 fold_build2 (MINUS_EXPR,
5525 TYPE_MIN_VALUE (domain)));
5528 size_binop (MULT_EXPR, index,
5529 fold_convert (ssizetype,
5530 TYPE_SIZE_UNIT (elttype)));
5531 xtarget = offset_address (target,
5532 expand_normal (position),
5533 highest_pow2_factor (position));
5534 xtarget = adjust_address (xtarget, mode, 0);
5535 store_expr (value, xtarget, 0, false);
5540 bitpos = ((tree_low_cst (index, 0) - minelt)
5541 * tree_low_cst (TYPE_SIZE (elttype), 1));
5543 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5545 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5546 && TREE_CODE (type) == ARRAY_TYPE
5547 && TYPE_NONALIASED_COMPONENT (type))
5549 target = copy_rtx (target);
5550 MEM_KEEP_ALIAS_SET_P (target) = 1;
5552 store_constructor_field (target, bitsize, bitpos, mode, value,
5553 type, cleared, get_alias_set (elttype));
5561 unsigned HOST_WIDE_INT idx;
5562 constructor_elt *ce;
5566 tree elttype = TREE_TYPE (type);
5567 int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5568 enum machine_mode eltmode = TYPE_MODE (elttype);
5569 HOST_WIDE_INT bitsize;
5570 HOST_WIDE_INT bitpos;
5571 rtvec vector = NULL;
5574 gcc_assert (eltmode != BLKmode);
5576 n_elts = TYPE_VECTOR_SUBPARTS (type);
5577 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5579 enum machine_mode mode = GET_MODE (target);
5581 icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5582 if (icode != CODE_FOR_nothing)
5586 vector = rtvec_alloc (n_elts);
5587 for (i = 0; i < n_elts; i++)
5588 RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5592 /* If the constructor has fewer elements than the vector,
5593 clear the whole vector first. Similarly if this is a static
5594 constructor of a non-BLKmode object. */
5597 else if (REG_P (target) && TREE_STATIC (exp))
5601 unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5604 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5606 int n_elts_here = tree_low_cst
5607 (int_const_binop (TRUNC_DIV_EXPR,
5608 TYPE_SIZE (TREE_TYPE (value)),
5609 TYPE_SIZE (elttype), 0), 1);
5611 count += n_elts_here;
5612 if (mostly_zeros_p (value))
5613 zero_count += n_elts_here;
5616 /* Clear the entire vector first if there are any missing elements,
5617 or if the incidence of zero elements is >= 75%. */
5618 need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5621 if (need_to_clear && size > 0 && !vector)
5624 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5626 clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5630 /* Inform later passes that the old value is dead. */
5631 if (!cleared && !vector && REG_P (target))
5632 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5634 /* Store each element of the constructor into the corresponding
5635 element of TARGET, determined by counting the elements. */
5636 for (idx = 0, i = 0;
5637 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5638 idx++, i += bitsize / elt_size)
5640 HOST_WIDE_INT eltpos;
5641 tree value = ce->value;
5643 bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5644 if (cleared && initializer_zerop (value))
5648 eltpos = tree_low_cst (ce->index, 1);
5654 /* Vector CONSTRUCTORs should only be built from smaller
5655 vectors in the case of BLKmode vectors. */
5656 gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5657 RTVEC_ELT (vector, eltpos)
5658 = expand_normal (value);
5662 enum machine_mode value_mode =
5663 TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5664 ? TYPE_MODE (TREE_TYPE (value))
5666 bitpos = eltpos * elt_size;
5667 store_constructor_field (target, bitsize, bitpos,
5668 value_mode, value, type,
5669 cleared, get_alias_set (elttype));
5674 emit_insn (GEN_FCN (icode)
5676 gen_rtx_PARALLEL (GET_MODE (target), vector)));
5685 /* Store the value of EXP (an expression tree)
5686 into a subfield of TARGET which has mode MODE and occupies
5687 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5688 If MODE is VOIDmode, it means that we are storing into a bit-field.
5690 Always return const0_rtx unless we have something particular to
5691 return.
5693 TYPE is the type of the underlying object,
5695 ALIAS_SET is the alias set for the destination. This value will
5696 (in general) be different from that for TARGET, since TARGET is a
5697 reference to the containing structure.
5699 If NONTEMPORAL is true, try generating a nontemporal store. */
5702 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5703 enum machine_mode mode, tree exp, tree type,
5704 alias_set_type alias_set, bool nontemporal)
5706 HOST_WIDE_INT width_mask = 0;
5708 if (TREE_CODE (exp) == ERROR_MARK)
5711 /* If we have nothing to store, do nothing unless the expression has
5712 side-effects. */
5714 return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5715 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5716 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5718 /* If we are storing into an unaligned field of an aligned union that is
5719 in a register, we may have the mode of TARGET being an integer mode but
5720 MODE == BLKmode. In that case, get an aligned object whose size and
5721 alignment are the same as TARGET and store TARGET into it (we can avoid
5722 the store if the field being stored is the entire width of TARGET). Then
5723 call ourselves recursively to store the field into a BLKmode version of
5724 that object. Finally, load from the object into TARGET. This is not
5725 very efficient in general, but should only be slightly more expensive
5726 than the otherwise-required unaligned accesses. Perhaps this can be
5727 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5728 twice, once with emit_move_insn and once via store_field. */
5731 && (REG_P (target) || GET_CODE (target) == SUBREG))
5733 rtx object = assign_temp (type, 0, 1, 1);
5734 rtx blk_object = adjust_address (object, BLKmode, 0);
5736 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5737 emit_move_insn (object, target);
5739 store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5742 emit_move_insn (target, object);
5744 /* We want to return the BLKmode version of the data. */
5748 if (GET_CODE (target) == CONCAT)
5750 /* We're storing into a struct containing a single __complex. */
5752 gcc_assert (!bitpos);
5753 return store_expr (exp, target, 0, nontemporal);
5756 /* If the structure is in a register or if the component
5757 is a bit field, we cannot use addressing to access it.
5758 Use bit-field techniques or SUBREG to store in it. */
5760 if (mode == VOIDmode
5761 || (mode != BLKmode && ! direct_store[(int) mode]
5762 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5763 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5765 || GET_CODE (target) == SUBREG
5766 /* If the field isn't aligned enough to store as an ordinary memref,
5767 store it as a bit field. */
5769 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5770 || bitpos % GET_MODE_ALIGNMENT (mode))
5771 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5772 || (bitpos % BITS_PER_UNIT != 0)))
5773 /* If the RHS and field are a constant size and the size of the
5774 RHS isn't the same size as the bitfield, we must use bitfield
5775 techniques. */
5777 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5778 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5782 /* If EXP is a NOP_EXPR of precision less than its mode, then that
5783 implies a mask operation. If the precision is the same size as
5784 the field we're storing into, that mask is redundant. This is
5785 particularly common with bit field assignments generated by the
5786 C front end. */
5787 if (TREE_CODE (exp) == NOP_EXPR)
5789 tree type = TREE_TYPE (exp);
5790 if (INTEGRAL_TYPE_P (type)
5791 && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5792 && bitsize == TYPE_PRECISION (type))
5794 type = TREE_TYPE (TREE_OPERAND (exp, 0));
5795 if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5796 exp = TREE_OPERAND (exp, 0);
5800 temp = expand_normal (exp);
5802 /* If BITSIZE is narrower than the size of the type of EXP
5803 we will be narrowing TEMP. Normally, what's wanted are the
5804 low-order bits. However, if EXP's type is a record and this is a
5805 big-endian machine, we want the upper BITSIZE bits. */
5806 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5807 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5808 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5809 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5810 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5814 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5815 MODE. */
5816 if (mode != VOIDmode && mode != BLKmode
5817 && mode != TYPE_MODE (TREE_TYPE (exp)))
5818 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5820 /* If the modes of TEMP and TARGET are both BLKmode, both
5821 must be in memory and BITPOS must be aligned on a byte
5822 boundary. If so, we simply do a block copy. Likewise
5823 for a BLKmode-like TARGET. */
5824 if (GET_MODE (temp) == BLKmode
5825 && (GET_MODE (target) == BLKmode
5827 && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
5828 && (bitpos % BITS_PER_UNIT) == 0
5829 && (bitsize % BITS_PER_UNIT) == 0)))
5831 gcc_assert (MEM_P (target) && MEM_P (temp)
5832 && (bitpos % BITS_PER_UNIT) == 0);
5834 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5835 emit_block_move (target, temp,
5836 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5843 /* Store the value in the bitfield. */
5844 store_bit_field (target, bitsize, bitpos, mode, temp);
5850 /* Now build a reference to just the desired component. */
5851 rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5853 if (to_rtx == target)
5854 to_rtx = copy_rtx (to_rtx);
5856 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5857 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5858 set_mem_alias_set (to_rtx, alias_set);
5860 return store_expr (exp, to_rtx, 0, nontemporal);
5864 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5865 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5866 codes and find the ultimate containing object, which we return.
5868 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5869 bit position, and *PUNSIGNEDP to the signedness of the field.
5870 If the position of the field is variable, we store a tree
5871 giving the variable offset (in units) in *POFFSET.
5872 This offset is in addition to the bit position.
5873 If the position is not variable, we store 0 in *POFFSET.
5875 If any of the extraction expressions is volatile,
5876 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5878 If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
5879 Otherwise, it is a mode that can be used to access the field.
5881 If the field describes a variable-sized object, *PMODE is set to
5882 BLKmode and *PBITSIZE is set to -1. An access cannot be made in
5883 this case, but the address of the object can be found.
5885 If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
5886 look through nodes that serve as markers of a greater alignment than
5887 the one that can be deduced from the expression. These nodes make it
5888 possible for front-ends to prevent temporaries from being created by
5889 the middle-end on alignment considerations. For that purpose, the
5890 normal operating mode at high-level is to always pass FALSE so that
5891 the ultimate containing object is really returned; moreover, the
5892 associated predicate handled_component_p will always return TRUE
5893 on these nodes, thus indicating that they are essentially handled
5894 by get_inner_reference. TRUE should only be passed when the caller
5895 is scanning the expression in order to build another representation
5896 and specifically knows how to handle these nodes; as such, this is
5897 the normal operating mode in the RTL expanders. */
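/* A worked example with hypothetical types: given
       struct S { int pad; struct { char c[8]; } in; } s;
   and the reference s.in.c[3], this function peels the ARRAY_REF and
   COMPONENT_REFs, returns the object S, and (on a target with 4-byte
   int) yields *PBITSIZE = BITS_PER_UNIT, *PBITPOS = 7 * BITS_PER_UNIT
   and a null *POFFSET.  */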
5900 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5901 HOST_WIDE_INT *pbitpos, tree *poffset,
5902 enum machine_mode *pmode, int *punsignedp,
5903 int *pvolatilep, bool keep_aligning)
5906 enum machine_mode mode = VOIDmode;
5907 bool blkmode_bitfield = false;
5908 tree offset = size_zero_node;
5909 tree bit_offset = bitsize_zero_node;
5911 /* First get the mode, signedness, and size. We do this from just the
5912 outermost expression. */
5913 if (TREE_CODE (exp) == COMPONENT_REF)
5915 tree field = TREE_OPERAND (exp, 1);
5916 size_tree = DECL_SIZE (field);
5917 if (!DECL_BIT_FIELD (field))
5918 mode = DECL_MODE (field);
5919 else if (DECL_MODE (field) == BLKmode)
5920 blkmode_bitfield = true;
5922 *punsignedp = DECL_UNSIGNED (field);
5924 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5926 size_tree = TREE_OPERAND (exp, 1);
5927 *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
5928 || TYPE_UNSIGNED (TREE_TYPE (exp)));
5930 /* For vector types, with the correct size of access, use the mode of
5931 the inner type. */
5932 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
5933 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
5934 && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
5935 mode = TYPE_MODE (TREE_TYPE (exp));
5939 mode = TYPE_MODE (TREE_TYPE (exp));
5940 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5942 if (mode == BLKmode)
5943 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5945 *pbitsize = GET_MODE_BITSIZE (mode);
5950 if (! host_integerp (size_tree, 1))
5951 mode = BLKmode, *pbitsize = -1;
5953 *pbitsize = tree_low_cst (size_tree, 1);
5956 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5957 and find the ultimate containing object. */
5960 switch (TREE_CODE (exp))
5963 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5964 TREE_OPERAND (exp, 2));
5969 tree field = TREE_OPERAND (exp, 1);
5970 tree this_offset = component_ref_field_offset (exp);
5972 /* If this field hasn't been filled in yet, don't go past it.
5973 This should only happen when folding expressions made during
5974 type construction. */
5975 if (this_offset == 0)
5978 offset = size_binop (PLUS_EXPR, offset, this_offset);
5979 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5980 DECL_FIELD_BIT_OFFSET (field));
5982 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5987 case ARRAY_RANGE_REF:
5989 tree index = TREE_OPERAND (exp, 1);
5990 tree low_bound = array_ref_low_bound (exp);
5991 tree unit_size = array_ref_element_size (exp);
5993 /* We assume all arrays have sizes that are a multiple of a byte.
5994 First subtract the lower bound, if any, in the type of the
5995 index, then convert to sizetype and multiply by the size of
5996 the array element. */
5997 if (! integer_zerop (low_bound))
5998 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6001 offset = size_binop (PLUS_EXPR, offset,
6002 size_binop (MULT_EXPR,
6003 fold_convert (sizetype, index),
6012 bit_offset = size_binop (PLUS_EXPR, bit_offset,
6013 bitsize_int (*pbitsize));
6016 case VIEW_CONVERT_EXPR:
6017 if (keep_aligning && STRICT_ALIGNMENT
6018 && (TYPE_ALIGN (TREE_TYPE (exp))
6019 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6020 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6021 < BIGGEST_ALIGNMENT)
6022 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6023 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6031 /* If any reference in the chain is volatile, the effect is volatile. */
6032 if (TREE_THIS_VOLATILE (exp))
6035 exp = TREE_OPERAND (exp, 0);
6039 /* If OFFSET is constant, see if we can return the whole thing as a
6040 constant bit position. Make sure to handle overflow during
6041 this conversion. */
6042 if (host_integerp (offset, 0))
6044 double_int tem = double_int_mul (tree_to_double_int (offset),
6045 uhwi_to_double_int (BITS_PER_UNIT));
6046 tem = double_int_add (tem, tree_to_double_int (bit_offset));
6047 if (double_int_fits_in_shwi_p (tem))
6049 *pbitpos = double_int_to_shwi (tem);
6050 *poffset = offset = NULL_TREE;
6054 /* Otherwise, split it up. */
6057 *pbitpos = tree_low_cst (bit_offset, 0);
6061 /* We can use BLKmode for a byte-aligned BLKmode bitfield. */
6062 if (mode == VOIDmode
6064 && (*pbitpos % BITS_PER_UNIT) == 0
6065 && (*pbitsize % BITS_PER_UNIT) == 0)
6073 /* Given an expression EXP that may be a COMPONENT_REF or an ARRAY_REF,
6074 look for whether EXP or any nested component-refs within EXP is marked
6075 as PACKED. */
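/* For example, with a hypothetical
       struct __attribute__ ((packed)) P { char c; int i; } *p;
   a COMPONENT_REF such as p->i refers to a field of a packed struct,
   so this function returns true for it.  */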
6078 contains_packed_reference (const_tree exp)
6080 bool packed_p = false;
6084 switch (TREE_CODE (exp))
6088 tree field = TREE_OPERAND (exp, 1);
6089 packed_p = DECL_PACKED (field)
6090 || TYPE_PACKED (TREE_TYPE (field))
6091 || TYPE_PACKED (TREE_TYPE (exp));
6099 case ARRAY_RANGE_REF:
6102 case VIEW_CONVERT_EXPR:
6108 exp = TREE_OPERAND (exp, 0);
6114 /* Return a tree of sizetype representing the size, in bytes, of the element
6115 of EXP, an ARRAY_REF. */
6118 array_ref_element_size (tree exp)
6120 tree aligned_size = TREE_OPERAND (exp, 3);
6121 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6123 /* If a size was specified in the ARRAY_REF, it's the size measured
6124 in alignment units of the element type. So multiply by that value. */
6127 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6128 sizetype from another type of the same width and signedness. */
6129 if (TREE_TYPE (aligned_size) != sizetype)
6130 aligned_size = fold_convert (sizetype, aligned_size);
6131 return size_binop (MULT_EXPR, aligned_size,
6132 size_int (TYPE_ALIGN_UNIT (elmt_type)));
6135 /* Otherwise, take the size from that of the element type. Substitute
6136 any PLACEHOLDER_EXPR that we have. */
6138 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6141 /* Return a tree representing the lower bound of the array mentioned in
6142 EXP, an ARRAY_REF. */
6145 array_ref_low_bound (tree exp)
6147 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6149 /* If a lower bound is specified in EXP, use it. */
6150 if (TREE_OPERAND (exp, 2))
6151 return TREE_OPERAND (exp, 2);
6153 /* Otherwise, if there is a domain type and it has a lower bound, use it,
6154 substituting for a PLACEHOLDER_EXPR as needed. */
6155 if (domain_type && TYPE_MIN_VALUE (domain_type))
6156 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6158 /* Otherwise, return a zero of the appropriate type. */
6159 return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6162 /* Return a tree representing the upper bound of the array mentioned in
6163 EXP, an ARRAY_REF. */
6166 array_ref_up_bound (tree exp)
6168 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6170 /* If there is a domain type and it has an upper bound, use it, substituting
6171 for a PLACEHOLDER_EXPR as needed. */
6172 if (domain_type && TYPE_MAX_VALUE (domain_type))
6173 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6175 /* Otherwise fail. */
6179 /* Return a tree representing the offset, in bytes, of the field referenced
6180 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
6183 component_ref_field_offset (tree exp)
6185 tree aligned_offset = TREE_OPERAND (exp, 2);
6186 tree field = TREE_OPERAND (exp, 1);
6188 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6189 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
6190 value. */
6193 /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6194 sizetype from another type of the same width and signedness. */
6195 if (TREE_TYPE (aligned_offset) != sizetype)
6196 aligned_offset = fold_convert (sizetype, aligned_offset);
6197 return size_binop (MULT_EXPR, aligned_offset,
6198 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
6201 /* Otherwise, take the offset from that of the field. Substitute
6202 any PLACEHOLDER_EXPR that we have. */
6204 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6207 /* Return 1 if T is an expression that get_inner_reference handles. */
6210 handled_component_p (const_tree t)
6212 switch (TREE_CODE (t))
6217 case ARRAY_RANGE_REF:
6218 case VIEW_CONVERT_EXPR:
6228 /* Given an rtx VALUE that may contain additions and multiplications, return
6229 an equivalent value that just refers to a register, memory, or constant.
6230 This is done by generating instructions to perform the arithmetic and
6231 returning a pseudo-register containing the value.
6233 The returned value may be a REG, SUBREG, MEM or constant. */
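/* Illustrative use: handed something like
       (plus:SI (reg:SI 60) (const_int 4))
   this emits an add instruction and returns the pseudo-register
   holding the sum, which is then acceptable as an ordinary operand.  */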
6236 force_operand (rtx value, rtx target)
6239 /* Use subtarget as the target for operand 0 of a binary operation. */
6240 rtx subtarget = get_subtarget (target);
6241 enum rtx_code code = GET_CODE (value);
6243 /* Check for a subreg applied to an expression produced by the loop optimizer. */
6245 && !REG_P (SUBREG_REG (value))
6246 && !MEM_P (SUBREG_REG (value)))
6249 = simplify_gen_subreg (GET_MODE (value),
6250 force_reg (GET_MODE (SUBREG_REG (value)),
6251 force_operand (SUBREG_REG (value),
6252 NULL_RTX)),
6253 GET_MODE (SUBREG_REG (value)),
6254 SUBREG_BYTE (value));
6255 code = GET_CODE (value);
6258 /* Check for a PIC address load. */
6259 if ((code == PLUS || code == MINUS)
6260 && XEXP (value, 0) == pic_offset_table_rtx
6261 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6262 || GET_CODE (XEXP (value, 1)) == LABEL_REF
6263 || GET_CODE (XEXP (value, 1)) == CONST))
6266 subtarget = gen_reg_rtx (GET_MODE (value));
6267 emit_move_insn (subtarget, value);
6271 if (ARITHMETIC_P (value))
6273 op2 = XEXP (value, 1);
6274 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6275 subtarget = 0;
6276 if (code == MINUS && GET_CODE (op2) == CONST_INT)
6277 {
6278 code = PLUS;
6279 op2 = negate_rtx (GET_MODE (value), op2);
6280 }
6282 /* Check for an addition with OP2 a constant integer and our first
6283 operand a PLUS of a virtual register and something else. In that
6284 case, we want to emit the sum of the virtual register and the
6285 constant first and then add the other value. This allows virtual
6286 register instantiation to simply modify the constant rather than
6287 creating another one around this addition. */
6288 if (code == PLUS && GET_CODE (op2) == CONST_INT
6289 && GET_CODE (XEXP (value, 0)) == PLUS
6290 && REG_P (XEXP (XEXP (value, 0), 0))
6291 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6292 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6294 rtx temp = expand_simple_binop (GET_MODE (value), code,
6295 XEXP (XEXP (value, 0), 0), op2,
6296 subtarget, 0, OPTAB_LIB_WIDEN);
6297 return expand_simple_binop (GET_MODE (value), code, temp,
6298 force_operand (XEXP (XEXP (value,
6299 0), 1), NULL_RTX),
6300 target, 0, OPTAB_LIB_WIDEN);
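/* Illustrative example, not from the original sources: for
   VALUE == (plus (plus (reg virtual-stack-vars) (reg R)) (const_int 16)),
   the code above first emits T = virtual-stack-vars + 16, which
   instantiation can fold into a single frame offset, and only then
   adds R, rather than materializing the inner PLUS first.  */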
6303 op1 = force_operand (XEXP (value, 0), subtarget);
6304 op2 = force_operand (op2, NULL_RTX);
6308 return expand_mult (GET_MODE (value), op1, op2, target, 1);
6310 if (!INTEGRAL_MODE_P (GET_MODE (value)))
6311 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6312 target, 1, OPTAB_LIB_WIDEN);
6314 return expand_divmod (0,
6315 FLOAT_MODE_P (GET_MODE (value))
6316 ? RDIV_EXPR : TRUNC_DIV_EXPR,
6317 GET_MODE (value), op1, op2, target, 0);
6319 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6322 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6325 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6328 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6329 target, 0, OPTAB_LIB_WIDEN);
6331 return expand_simple_binop (GET_MODE (value), code, op1, op2,
6332 target, 1, OPTAB_LIB_WIDEN);
6335 if (UNARY_P (value))
6338 target = gen_reg_rtx (GET_MODE (value));
6339 op1 = force_operand (XEXP (value, 0), NULL_RTX);
6346 case FLOAT_TRUNCATE:
6347 convert_move (target, op1, code == ZERO_EXTEND);
6352 expand_fix (target, op1, code == UNSIGNED_FIX);
6356 case UNSIGNED_FLOAT:
6357 expand_float (target, op1, code == UNSIGNED_FLOAT);
6361 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6365 #ifdef INSN_SCHEDULING
6366 /* On machines that have insn scheduling, we want all memory references to be
6367 explicit, so we need to deal with such paradoxical SUBREGs. */
6368 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6369 && (GET_MODE_SIZE (GET_MODE (value))
6370 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6372 = simplify_gen_subreg (GET_MODE (value),
6373 force_reg (GET_MODE (SUBREG_REG (value)),
6374 force_operand (SUBREG_REG (value),
6375 NULL_RTX)),
6376 GET_MODE (SUBREG_REG (value)),
6377 SUBREG_BYTE (value));
6383 /* Subroutine of expand_expr: return nonzero iff there is no way that
6384 EXP can reference X, which is being modified. TOP_P is nonzero if this
6385 call is going to be used to determine whether we need a temporary
6386 for EXP, as opposed to a recursive call to this function.
6388 It is always safe for this routine to return zero since it merely
6389 searches for optimization opportunities. */
6392 safe_from_p (const_rtx x, tree exp, int top_p)
6398 /* If EXP has varying size, we MUST use a target since we currently
6399 have no way of allocating temporaries of variable size
6400 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6401 So we assume here that something at a higher level has prevented a
6402 clash. This is somewhat bogus, but the best we can do. Only
6403 do this when X is BLKmode and when we are at the top level. */
6404 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6405 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6406 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6407 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6408 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6410 && GET_MODE (x) == BLKmode)
6411 /* If X is in the outgoing argument area, it is always safe. */
6413 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6414 || (GET_CODE (XEXP (x, 0)) == PLUS
6415 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6418 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
6419 find the underlying pseudo.  */
6420 if (GET_CODE (x) == SUBREG)
6421 {
6422 x = SUBREG_REG (x);
6423 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6427 /* Now look at our tree code and possibly recurse. */
6428 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6430 case tcc_declaration:
6431 exp_rtl = DECL_RTL_IF_SET (exp);
6437 case tcc_exceptional:
6438 if (TREE_CODE (exp) == TREE_LIST)
6442 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6444 exp = TREE_CHAIN (exp);
6447 if (TREE_CODE (exp) != TREE_LIST)
6448 return safe_from_p (x, exp, 0);
6451 else if (TREE_CODE (exp) == CONSTRUCTOR)
6453 constructor_elt *ce;
6454 unsigned HOST_WIDE_INT idx;
6456 for (idx = 0;
6457 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6458 idx++)
6459 if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6460 || !safe_from_p (x, ce->value, 0))
6464 else if (TREE_CODE (exp) == ERROR_MARK)
6465 return 1; /* An already-visited SAVE_EXPR? */
6470 /* The only case we look at here is the DECL_INITIAL inside a
6471 DECL_EXPR.  */
6472 return (TREE_CODE (exp) != DECL_EXPR
6473 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6474 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6475 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6478 case tcc_comparison:
6479 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6484 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6486 case tcc_expression:
6489 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6490 the expression. If it is set, we conflict iff we are that rtx or
6491 both are in memory. Otherwise, we check all operands of the
6492 expression recursively. */
6494 switch (TREE_CODE (exp))
6497 /* If the operand is static or we are static, we can't conflict.
6498 Likewise if we don't conflict with the operand at all. */
6499 if (staticp (TREE_OPERAND (exp, 0))
6500 || TREE_STATIC (exp)
6501 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6504 /* Otherwise, the only way this can conflict is if we are taking
6505 the address of a DECL and that address is part of X, which is
6506 very rare.  */
6507 exp = TREE_OPERAND (exp, 0);
6510 if (!DECL_RTL_SET_P (exp)
6511 || !MEM_P (DECL_RTL (exp)))
6512 return 0;
6513 else
6514 exp_rtl = XEXP (DECL_RTL (exp), 0);
6518 case MISALIGNED_INDIRECT_REF:
6519 case ALIGN_INDIRECT_REF:
6522 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6523 get_alias_set (exp)))
6528 /* Assume that the call will clobber all hard registers and
6529 all of memory.  */
6530 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6535 case WITH_CLEANUP_EXPR:
6536 case CLEANUP_POINT_EXPR:
6537 /* Lowered by gimplify.c. */
6541 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6547 /* If we have an rtx, we do not need to scan our operands. */
6551 nops = TREE_OPERAND_LENGTH (exp);
6552 for (i = 0; i < nops; i++)
6553 if (TREE_OPERAND (exp, i) != 0
6554 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6560 /* Should never get a type here. */
6564 /* If we have an rtl, find any enclosed object.  Then see if we conflict with it.  */
6568 if (GET_CODE (exp_rtl) == SUBREG)
6570 exp_rtl = SUBREG_REG (exp_rtl);
6572 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6576 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6577 are memory and they conflict. */
6578 return ! (rtx_equal_p (x, exp_rtl)
6579 || (MEM_P (x) && MEM_P (exp_rtl)
6580 && true_dependence (exp_rtl, VOIDmode, x,
6581 rtx_addr_varies_p)));
6584 /* If we reach here, it is safe. */
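/* Illustrative usage, not from the original sources: before expanding a
   right-hand side directly into an existing target X, callers ask
   safe_from_p (X, rhs, 1); a zero answer merely forces evaluation into
   a fresh temporary, so a false negative only costs an extra copy.  */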
6589 /* Return the highest power of two that EXP is known to be a multiple of.
6590 This is used in updating alignment of MEMs in array references. */
6592 unsigned HOST_WIDE_INT
6593 highest_pow2_factor (const_tree exp)
6595 unsigned HOST_WIDE_INT c0, c1;
6597 switch (TREE_CODE (exp))
6600 /* We can find the lowest bit that's a one. If the low
6601 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6602 We need to handle this case since we can find it in a COND_EXPR,
6603 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6604 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6606 if (TREE_OVERFLOW (exp))
6607 return BIGGEST_ALIGNMENT;
6610 /* Note: tree_low_cst is intentionally not used here;
6611 we don't care about the upper bits.  */
6612 c0 = TREE_INT_CST_LOW (exp);
6613 c0 &= -c0;
6614 return c0 ? c0 : BIGGEST_ALIGNMENT;
6618 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6619 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6620 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6621 return MIN (c0, c1);
6624 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6625 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6626 return c0 * c1;
6628 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6630 if (integer_pow2p (TREE_OPERAND (exp, 1))
6631 && host_integerp (TREE_OPERAND (exp, 1), 1))
6633 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6634 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6635 return MAX (1, c0 / c1);
6640 /* The highest power of two of a bit-and expression is the maximum of
6641 that of its operands. We typically get here for a complex LHS and
6642 a constant negative power of two on the RHS to force an explicit
6643 alignment, so don't bother looking at the LHS. */
6644 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6648 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6651 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6654 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6655 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6656 return MIN (c0, c1);
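/* Illustrative sketch, not part of GCC: the INTEGER_CST and PLUS cases
   above, on a host integer.  A constant's factor is its lowest set bit
   (24 -> 8, 12 -> 4); a sum such as 8*i + 12 gets MIN (8, 4) = 4.  */
#if 0
static unsigned long
example_pow2_factor_of_const (unsigned long c)
{
  return c & -c;    /* Same trick as "c0 &= -c0" above; 0 is the
                       BIGGEST_ALIGNMENT convention handled separately.  */
}
#endif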
6665 /* Similar, except that the alignment requirements of TARGET are
6666 taken into account. Assume it is at least as aligned as its
6667 type, unless it is a COMPONENT_REF in which case the layout of
6668 the structure gives the alignment. */
6670 static unsigned HOST_WIDE_INT
6671 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6673 unsigned HOST_WIDE_INT target_align, factor;
6675 factor = highest_pow2_factor (exp);
6676 if (TREE_CODE (target) == COMPONENT_REF)
6677 target_align = DECL_ALIGN_UNIT (TREE_OPERAND (target, 1));
6679 target_align = TYPE_ALIGN_UNIT (TREE_TYPE (target));
6680 return MAX (factor, target_align);
6683 /* Return &VAR expression for emulated thread local VAR. */
6686 emutls_var_address (tree var)
6688 tree emuvar = emutls_decl (var);
6689 tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6690 tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6691 tree arglist = build_tree_list (NULL_TREE, arg);
6692 tree call = build_function_call_expr (fn, arglist);
6693 return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6697 /* Subroutine of expand_expr. Expand the two operands of a binary
6698 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6699 The value may be stored in TARGET if TARGET is nonzero. The
6700 MODIFIER argument is as documented by expand_expr. */
6703 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6704 enum expand_modifier modifier)
6706 if (! safe_from_p (target, exp1, 1))
6708 if (operand_equal_p (exp0, exp1, 0))
6710 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6711 *op1 = copy_rtx (*op0);
6715 /* If we need to preserve evaluation order, copy exp0 into its own
6716 temporary variable so that it can't be clobbered by exp1. */
6717 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6718 exp0 = save_expr (exp0);
6719 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6720 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
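/* Illustrative sketch, not part of GCC (function names hypothetical):
   the flag_evaluation_order guard above, restated in plain C.  */
#if 0
static int
example_ordered_operands (int (*exp0) (void), int (*exp1) (void))
{
  int op0 = exp0 ();   /* save_expr: exp0 is forced first...  */
  int op1 = exp1 ();   /* ...so a side-effecting exp1 cannot clobber it.  */
  return op0 + op1;
}
#endif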
6725 /* Return a MEM that contains constant EXP. DEFER is as for
6726 output_constant_def and MODIFIER is as for expand_expr. */
6729 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6733 mem = output_constant_def (exp, defer);
6734 if (modifier != EXPAND_INITIALIZER)
6735 mem = use_anchored_address (mem);
6739 /* A subroutine of expand_expr_addr_expr. Evaluate the address of EXP.
6740 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6743 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6744 enum expand_modifier modifier)
6746 rtx result, subtarget;
6748 HOST_WIDE_INT bitsize, bitpos;
6749 int volatilep, unsignedp;
6750 enum machine_mode mode1;
6752 /* If we are taking the address of a constant and are at the top level,
6753 we have to use output_constant_def since we can't call force_const_mem
6755 /* ??? This should be considered a front-end bug. We should not be
6756 generating ADDR_EXPR of something that isn't an LVALUE. The only
6757 exception here is STRING_CST. */
6758 if (CONSTANT_CLASS_P (exp))
6759 return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6761 /* Everything must be something allowed by is_gimple_addressable. */
6762 switch (TREE_CODE (exp))
6765 /* This case will happen via recursion for &a->b. */
6766 return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6769 /* Recurse and make the output_constant_def clause above handle this. */
6770 return expand_expr_addr_expr_1 (DECL_INITIAL (exp), target,
6774 /* The real part of the complex number is always first, therefore
6775 the address is the same as the address of the parent object. */
6778 inner = TREE_OPERAND (exp, 0);
6782 /* The imaginary part of the complex number is always second.
6783 The expression is therefore always offset by the size of the
6786 bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6787 inner = TREE_OPERAND (exp, 0);
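/* Illustrative example, not from the original sources: for
   "_Complex double z", the address of __imag__ z is the address of z
   plus sizeof (double) bytes; BITPOS above records exactly that
   displacement, in bits.  */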
6791 /* TLS emulation hook - replace __thread VAR's &VAR with
6792 __emutls_get_address (&_emutls.VAR). */
6793 if (! targetm.have_tls
6794 && TREE_CODE (exp) == VAR_DECL
6795 && DECL_THREAD_LOCAL_P (exp))
6797 exp = emutls_var_address (exp);
6798 return expand_expr (exp, target, tmode, modifier);
6803 /* If the object is a DECL, then expand it for its rtl. Don't bypass
6804 expand_expr, as that can have various side effects; LABEL_DECLs for
6805 example, may not have their DECL_RTL set yet. Expand the rtl of
6806 CONSTRUCTORs too, which should yield a memory reference for the
6807 constructor's contents. Assume language specific tree nodes can
6808 be expanded in some interesting way. */
6810 || TREE_CODE (exp) == CONSTRUCTOR
6811 || TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE)
6813 result = expand_expr (exp, target, tmode,
6814 modifier == EXPAND_INITIALIZER
6815 ? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
6817 /* If the DECL isn't in memory, then the DECL wasn't properly
6818 marked TREE_ADDRESSABLE, which will be either a front-end
6819 or a tree optimizer bug. */
6820 gcc_assert (MEM_P (result));
6821 result = XEXP (result, 0);
6823 /* ??? Is this needed anymore? */
6824 if (DECL_P (exp) && ! TREE_USED (exp))
6826 assemble_external (exp);
6827 TREE_USED (exp) = 1;
6830 if (modifier != EXPAND_INITIALIZER
6831 && modifier != EXPAND_CONST_ADDRESS)
6832 result = force_operand (result, target);
6836 /* Pass FALSE as the last argument to get_inner_reference although
6837 we are expanding to RTL. The rationale is that we know how to
6838 handle "aligning nodes" here: we can just bypass them because
6839 they won't change the final object whose address will be returned
6840 (they actually exist only for that purpose). */
6841 inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6842 &mode1, &unsignedp, &volatilep, false);
6846 /* We must have made progress. */
6847 gcc_assert (inner != exp);
6849 subtarget = offset || bitpos ? NULL_RTX : target;
6850 result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier);
6856 if (modifier != EXPAND_NORMAL)
6857 result = force_operand (result, NULL);
6858 tmp = expand_expr (offset, NULL_RTX, tmode,
6859 modifier == EXPAND_INITIALIZER
6860 ? EXPAND_INITIALIZER : EXPAND_NORMAL);
6862 result = convert_memory_address (tmode, result);
6863 tmp = convert_memory_address (tmode, tmp);
6865 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6866 result = gen_rtx_PLUS (tmode, result, tmp);
6869 subtarget = bitpos ? NULL_RTX : target;
6870 result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
6871 1, OPTAB_LIB_WIDEN);
6877 /* Someone beforehand should have rejected taking the address
6878 of such an object. */
6879 gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
6881 result = plus_constant (result, bitpos / BITS_PER_UNIT);
6882 if (modifier < EXPAND_SUM)
6883 result = force_operand (result, target);
6889 /* A subroutine of expand_expr. Evaluate EXP, which is an ADDR_EXPR.
6890 The TARGET, TMODE and MODIFIER arguments are as for expand_expr. */
6893 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
6894 enum expand_modifier modifier)
6896 enum machine_mode rmode;
6899 /* Target mode of VOIDmode says "whatever's natural". */
6900 if (tmode == VOIDmode)
6901 tmode = TYPE_MODE (TREE_TYPE (exp));
6903 /* We can get called with some Weird Things if the user does silliness
6904 like "(short) &a". In that case, convert_memory_address won't do
6905 the right thing, so ignore the given target mode. */
6906 if (tmode != Pmode && tmode != ptr_mode)
6907 tmode = Pmode;
6909 result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
6912 /* Despite expand_expr's claims about ignoring TMODE when not
6913 strictly convenient, stuff breaks if we don't honor it. Note
6914 that combined with the above, we only do this for pointer modes. */
6915 rmode = GET_MODE (result);
6916 if (rmode == VOIDmode)
6917 rmode = tmode;
6918 if (rmode != tmode)
6919 result = convert_memory_address (tmode, result);
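/* Illustrative example, not from the original sources: for
   "short s = (short) &x;" this is reached with TMODE == HImode; per the
   check above the address is still computed in Pmode, and the narrowing
   to the expression's type is left to the enclosing conversion.  */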
6924 /* Generate code for computing CONSTRUCTOR EXP.
6925 An rtx for the computed value is returned. If AVOID_TEMP_MEM
6926 is TRUE, instead of creating a temporary variable in memory
6927 NULL is returned and the caller needs to handle it differently. */
6930 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
6931 bool avoid_temp_mem)
6933 tree type = TREE_TYPE (exp);
6934 enum machine_mode mode = TYPE_MODE (type);
6936 /* Try to avoid creating a temporary at all. This is possible
6937 if all of the initializer is zero.
6938 FIXME: try to handle all [0..255] initializers we can handle
6939 with memset.  */
6940 if (TREE_STATIC (exp)
6941 && !TREE_ADDRESSABLE (exp)
6942 && target != 0 && mode == BLKmode
6943 && all_zeros_p (exp))
6945 clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
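/* Illustrative example, not from the original sources: a local aggregate
   initialization such as "struct S s = { 0 };" whose CONSTRUCTOR is all
   zeros takes this path and becomes a single block clear, with no
   temporary and no element-wise stores.  */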
6949 /* All elts simple constants => refer to a constant in memory. But
6950 if this is a non-BLKmode mode, let it store a field at a time
6951 since that should make a CONST_INT or CONST_DOUBLE when we
6952 fold. Likewise, if we have a target we can use, it is best to
6953 store directly into the target unless the type is large enough
6954 that memcpy will be used. If we are making an initializer and
6955 all operands are constant, put it in memory as well.
6957 FIXME: Avoid trying to fill vector constructors piecemeal.
6958 Output them with output_constant_def below unless we're sure
6959 they're zeros. This should go away when vector initializers
6960 are treated like VECTOR_CST instead of arrays. */
6961 if ((TREE_STATIC (exp)
6962 && ((mode == BLKmode
6963 && ! (target != 0 && safe_from_p (target, exp, 1)))
6964 || TREE_ADDRESSABLE (exp)
6965 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6966 && (! MOVE_BY_PIECES_P
6967 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6969 && ! mostly_zeros_p (exp))))
6970 || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
6971 && TREE_CONSTANT (exp)))
6978 constructor = expand_expr_constant (exp, 1, modifier);
6980 if (modifier != EXPAND_CONST_ADDRESS
6981 && modifier != EXPAND_INITIALIZER
6982 && modifier != EXPAND_SUM)
6983 constructor = validize_mem (constructor);
6988 /* Handle calls that pass values in multiple non-contiguous
6989 locations. The Irix 6 ABI has examples of this. */
6990 if (target == 0 || ! safe_from_p (target, exp, 1)
6991 || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
6997 = assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
6998 | (TREE_READONLY (exp)
6999 * TYPE_QUAL_CONST))),
7000 0, TREE_ADDRESSABLE (exp), 1);
7003 store_constructor (exp, target, 0, int_expr_size (exp));
7008 /* expand_expr: generate code for computing expression EXP.
7009 An rtx for the computed value is returned. The value is never null.
7010 In the case of a void EXP, const0_rtx is returned.
7012 The value may be stored in TARGET if TARGET is nonzero.
7013 TARGET is just a suggestion; callers must assume that
7014 the rtx returned may not be the same as TARGET.
7016 If TARGET is CONST0_RTX, it means that the value will be ignored.
7018 If TMODE is not VOIDmode, it suggests generating the
7019 result in mode TMODE. But this is done only when convenient.
7020 Otherwise, TMODE is ignored and the value is generated in its natural mode.
7021 TMODE is just a suggestion; callers must assume that
7022 the rtx returned may not have mode TMODE.
7024 Note that TARGET may have neither TMODE nor MODE. In that case, it
7025 probably will not be used.
7027 If MODIFIER is EXPAND_SUM then when EXP is an addition
7028 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7029 or a nest of (PLUS ...) and (MINUS ...) where the terms are
7030 products as above, or REG or MEM, or constant.
7031 Ordinarily in such cases we would output mul or add instructions
7032 and then return a pseudo reg containing the sum.
7034 EXPAND_INITIALIZER is much like EXPAND_SUM except that
7035 it also marks a label as absolutely required (it can't be dead).
7036 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7037 This is used for outputting expressions used in initializers.
7039 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7040 with a constant address even if that address is not normally legitimate.
7041 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7043 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7044 a call parameter. Such targets require special care as we haven't yet
7045 marked TARGET so that it's safe from being trashed by libcalls. We
7046 don't want to use TARGET for anything but the final result;
7047 intermediate values must go elsewhere.  Additionally, calls to
7048 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7050 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7051 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7052 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
7053 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7054 recursively.  */
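/* Illustrative example, not from the original sources: under EXPAND_SUM,
   "&x + i*4" may come back as
   (plus (mult (reg i) (const_int 4)) (symbol_ref x)),
   leaving the caller (typically an address computation) to fold the
   pieces, rather than forcing mul and add insns into a pseudo.  */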
7056 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
7057 enum expand_modifier, rtx *);
7060 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7061 enum expand_modifier modifier, rtx *alt_rtl)
7064 rtx ret, last = NULL;
7066 /* Handle ERROR_MARK before anybody tries to access its type. */
7067 if (TREE_CODE (exp) == ERROR_MARK
7068 || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7070 ret = CONST0_RTX (tmode);
7071 return ret ? ret : const0_rtx;
7074 if (flag_non_call_exceptions)
7076 rn = lookup_expr_eh_region (exp);
7078 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
7080 last = get_last_insn ();
7083 /* If this is an expression of some kind and it has an associated line
7084 number, then emit the line number before expanding the expression.
7086 We need to save and restore the file and line information so that
7087 errors discovered during expansion are emitted with the right
7088 information.  It would be better if the diagnostic routines
7089 used the file/line information embedded in the tree nodes rather
7091 if (cfun && EXPR_HAS_LOCATION (exp))
7093 location_t saved_location = input_location;
7094 input_location = EXPR_LOCATION (exp);
7095 set_curr_insn_source_location (input_location);
7097 /* Record where the insns produced belong. */
7098 set_curr_insn_block (TREE_BLOCK (exp));
7100 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7102 input_location = saved_location;
7106 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7109 /* If using non-call exceptions, mark all insns that may trap.
7110 expand_call() will mark CALL_INSNs before we get to this code,
7111 but it doesn't handle libcalls, and these may trap. */
7115 for (insn = next_real_insn (last); insn;
7116 insn = next_real_insn (insn))
7118 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
7119 /* If we want exceptions for non-call insns, any
7120 may_trap_p instruction may throw. */
7121 && GET_CODE (PATTERN (insn)) != CLOBBER
7122 && GET_CODE (PATTERN (insn)) != USE
7123 && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
7124 add_reg_note (insn, REG_EH_REGION, GEN_INT (rn));
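/* Illustrative example, not from the original sources: a DImode division
   on a 32-bit target expands to a __divdi3 libcall whose CALL_INSN is
   not marked by expand_call; the loop above is what allows such an insn
   to throw under -fnon-call-exceptions.  */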
7132 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
7133 enum expand_modifier modifier, rtx *alt_rtl)
7135 rtx op0, op1, op2, temp, decl_rtl;
7138 enum machine_mode mode;
7139 enum tree_code code = TREE_CODE (exp);
7141 rtx subtarget, original_target;
7143 tree context, subexp0, subexp1;
7144 bool reduce_bit_field;
7145 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
7146 ? reduce_to_bit_field_precision ((expr), \
7147 target, \
7148 type) \
7149 : (expr))
7151 type = TREE_TYPE (exp);
7152 mode = TYPE_MODE (type);
7153 unsignedp = TYPE_UNSIGNED (type);
7155 ignore = (target == const0_rtx
7156 || ((CONVERT_EXPR_CODE_P (code)
7157 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7158 && TREE_CODE (type) == VOID_TYPE));
7160 /* An operation in what may be a bit-field type needs the
7161 result to be reduced to the precision of the bit-field type,
7162 which is narrower than that of the type's mode. */
7163 reduce_bit_field = (!ignore
7164 && TREE_CODE (type) == INTEGER_TYPE
7165 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7167 /* If we are going to ignore this result, we need only do something
7168 if there is a side-effect somewhere in the expression. If there
7169 is, short-circuit the most common cases here. Note that we must
7170 not call expand_expr with anything but const0_rtx in case this
7171 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
7175 if (! TREE_SIDE_EFFECTS (exp))
7178 /* Ensure we reference a volatile object even if value is ignored, but
7179 don't do this if all we are doing is taking its address. */
7180 if (TREE_THIS_VOLATILE (exp)
7181 && TREE_CODE (exp) != FUNCTION_DECL
7182 && mode != VOIDmode && mode != BLKmode
7183 && modifier != EXPAND_CONST_ADDRESS)
7185 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
7187 temp = copy_to_reg (temp);
7191 if (TREE_CODE_CLASS (code) == tcc_unary
7192 || code == COMPONENT_REF || code == INDIRECT_REF)
7193 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7196 else if (TREE_CODE_CLASS (code) == tcc_binary
7197 || TREE_CODE_CLASS (code) == tcc_comparison
7198 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
7200 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7201 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7204 else if (code == BIT_FIELD_REF)
7206 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
7207 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
7208 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
7215 if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7218 /* Use subtarget as the target for operand 0 of a binary operation. */
7219 subtarget = get_subtarget (target);
7220 original_target = target;
7226 tree function = decl_function_context (exp);
7228 temp = label_rtx (exp);
7229 temp = gen_rtx_LABEL_REF (Pmode, temp);
7231 if (function != current_function_decl
7233 LABEL_REF_NONLOCAL_P (temp) = 1;
7235 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
7240 return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier,
7245 /* If a static var's type was incomplete when the decl was written,
7246 but the type is complete now, lay out the decl now. */
7247 if (DECL_SIZE (exp) == 0
7248 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
7249 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
7250 layout_decl (exp, 0);
7252 /* TLS emulation hook - replace __thread vars with
7253 *__emutls_get_address (&_emutls.var). */
7254 if (! targetm.have_tls
7255 && TREE_CODE (exp) == VAR_DECL
7256 && DECL_THREAD_LOCAL_P (exp))
7258 exp = build_fold_indirect_ref (emutls_var_address (exp));
7259 return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
7262 /* ... fall through ... */
7266 decl_rtl = DECL_RTL (exp);
7267 gcc_assert (decl_rtl);
7268 decl_rtl = copy_rtx (decl_rtl);
7270 /* Ensure variable marked as used even if it doesn't go through
7271 a parser.  If it hasn't been used yet, write out an external
7272 definition.  */
7273 if (! TREE_USED (exp))
7275 assemble_external (exp);
7276 TREE_USED (exp) = 1;
7279 /* Show we haven't gotten RTL for this yet. */
7282 /* Variables inherited from containing functions should have
7283 been lowered by this point. */
7284 context = decl_function_context (exp);
7285 gcc_assert (!context
7286 || context == current_function_decl
7287 || TREE_STATIC (exp)
7288 /* ??? C++ creates functions that are not TREE_STATIC. */
7289 || TREE_CODE (exp) == FUNCTION_DECL);
7291 /* This is the case of an array whose size is to be determined
7292 from its initializer, while the initializer is still being parsed.
7293 See expand_decl.  */
7295 if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
7296 temp = validize_mem (decl_rtl);
7298 /* If DECL_RTL is memory, we are in the normal case; if the
7299 address is not valid, get the address into a register.  */
7301 else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
7304 *alt_rtl = decl_rtl;
7305 decl_rtl = use_anchored_address (decl_rtl);
7306 if (modifier != EXPAND_CONST_ADDRESS
7307 && modifier != EXPAND_SUM
7308 && !memory_address_p (DECL_MODE (exp), XEXP (decl_rtl, 0)))
7309 temp = replace_equiv_address (decl_rtl,
7310 copy_rtx (XEXP (decl_rtl, 0)));
7313 /* If we got something, return it. But first, set the alignment
7314 if the address is a register. */
7317 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
7318 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
7323 /* If the mode of DECL_RTL does not match that of the decl, it
7324 must be a promoted value. We return a SUBREG of the wanted mode,
7325 but mark it so that we know that it was already extended. */
7327 if (REG_P (decl_rtl)
7328 && GET_MODE (decl_rtl) != DECL_MODE (exp))
7330 enum machine_mode pmode;
7332 /* Get the signedness used for this variable. Ensure we get the
7333 same mode we got when the variable was declared. */
7334 pmode = promote_mode (type, DECL_MODE (exp), &unsignedp,
7335 (TREE_CODE (exp) == RESULT_DECL
7336 || TREE_CODE (exp) == PARM_DECL) ? 1 : 0);
7337 gcc_assert (GET_MODE (decl_rtl) == pmode);
7339 temp = gen_lowpart_SUBREG (mode, decl_rtl);
7340 SUBREG_PROMOTED_VAR_P (temp) = 1;
7341 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
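/* Illustrative example, not from the original sources: on a target that
   promotes subword values, an SImode PARM_DECL may live in a word_mode
   register; what is returned is then (subreg:SI (reg:DI ...)) with
   SUBREG_PROMOTED_VAR_P set, telling later code that the upper bits are
   already extended.  */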
7348 temp = immed_double_const (TREE_INT_CST_LOW (exp),
7349 TREE_INT_CST_HIGH (exp), mode);
7355 tree tmp = NULL_TREE;
7356 if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
7357 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
7358 || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
7359 || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
7360 || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
7361 || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
7362 return const_vector_from_tree (exp);
7363 if (GET_MODE_CLASS (mode) == MODE_INT)
7365 tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
7367 tmp = fold_unary (VIEW_CONVERT_EXPR, type_for_mode, exp);
7370 tmp = build_constructor_from_list (type,
7371 TREE_VECTOR_CST_ELTS (exp));
7372 return expand_expr (tmp, ignore ? const0_rtx : target,
7377 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
7380 /* If optimized, generate immediate CONST_DOUBLE
7381 which will be turned into memory by reload if necessary.
7383 We used to force a register so that loop.c could see it. But
7384 this does not allow gen_* patterns to perform optimizations with
7385 the constants. It also produces two insns in cases like "x = 1.0;".
7386 On most machines, floating-point constants are not permitted in
7387 many insns, so we'd end up copying it to a register in any case.
7389 Now, we do the copying in expand_binop, if appropriate. */
7390 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
7391 TYPE_MODE (TREE_TYPE (exp)));
7394 return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
7395 TYPE_MODE (TREE_TYPE (exp)));
7398 /* Handle evaluating a complex constant in a CONCAT target. */
7399 if (original_target && GET_CODE (original_target) == CONCAT)
7401 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7404 rtarg = XEXP (original_target, 0);
7405 itarg = XEXP (original_target, 1);
7407 /* Move the real and imaginary parts separately. */
7408 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
7409 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
7412 emit_move_insn (rtarg, op0);
7414 emit_move_insn (itarg, op1);
7416 return original_target;
7419 /* ... fall through ... */
7422 temp = expand_expr_constant (exp, 1, modifier);
7424 /* temp contains a constant address.
7425 On RISC machines where a constant address isn't valid,
7426 make some insns to get that address into a register. */
7427 if (modifier != EXPAND_CONST_ADDRESS
7428 && modifier != EXPAND_INITIALIZER
7429 && modifier != EXPAND_SUM
7430 && ! memory_address_p (mode, XEXP (temp, 0)))
7431 return replace_equiv_address (temp,
7432 copy_rtx (XEXP (temp, 0)));
7437 tree val = TREE_OPERAND (exp, 0);
7438 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
7440 if (!SAVE_EXPR_RESOLVED_P (exp))
7442 /* We can indeed still hit this case, typically via builtin
7443 expanders calling save_expr immediately before expanding
7444 something. Assume this means that we only have to deal
7445 with non-BLKmode values. */
7446 gcc_assert (GET_MODE (ret) != BLKmode);
7448 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
7449 DECL_ARTIFICIAL (val) = 1;
7450 DECL_IGNORED_P (val) = 1;
7451 TREE_OPERAND (exp, 0) = val;
7452 SAVE_EXPR_RESOLVED_P (exp) = 1;
7454 if (!CONSTANT_P (ret))
7455 ret = copy_to_reg (ret);
7456 SET_DECL_RTL (val, ret);
7463 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7464 expand_goto (TREE_OPERAND (exp, 0));
7466 expand_computed_goto (TREE_OPERAND (exp, 0));
7470 /* If we don't need the result, just ensure we evaluate any
7471 subexpressions.  */
7474 unsigned HOST_WIDE_INT idx;
7477 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
7478 expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
7483 return expand_constructor (exp, target, modifier, false);
7485 case MISALIGNED_INDIRECT_REF:
7486 case ALIGN_INDIRECT_REF:
7489 tree exp1 = TREE_OPERAND (exp, 0);
7491 if (modifier != EXPAND_WRITE)
7495 t = fold_read_from_constant_string (exp);
7497 return expand_expr (t, target, tmode, modifier);
7500 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7501 op0 = memory_address (mode, op0);
7503 if (code == ALIGN_INDIRECT_REF)
7505 int align = TYPE_ALIGN_UNIT (type);
7506 op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align));
7507 op0 = memory_address (mode, op0);
7510 temp = gen_rtx_MEM (mode, op0);
7512 set_mem_attributes (temp, exp, 0);
7514 /* Resolve the misalignment now, so that we don't have to remember
7515 to resolve it later. Of course, this only works for reads. */
7516 /* ??? When we get around to supporting writes, we'll have to handle
7517 this in store_expr directly. The vectorizer isn't generating
7518 those yet, however. */
7519 if (code == MISALIGNED_INDIRECT_REF)
7524 gcc_assert (modifier == EXPAND_NORMAL
7525 || modifier == EXPAND_STACK_PARM);
7527 /* The vectorizer should have already checked the mode. */
7528 icode = optab_handler (movmisalign_optab, mode)->insn_code;
7529 gcc_assert (icode != CODE_FOR_nothing);
7531 /* We've already validated the memory, and we're creating a
7532 new pseudo destination. The predicates really can't fail. */
7533 reg = gen_reg_rtx (mode);
7535 /* Nor can the insn generator. */
7536 insn = GEN_FCN (icode) (reg, temp);
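/* Illustrative example, not from the original sources: the vectorizer
   emits MISALIGNED_INDIRECT_REF for vector loads it cannot prove
   aligned; on a target providing a movmisalign pattern this becomes a
   single unaligned vector load into the fresh pseudo REG.  */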
7545 case TARGET_MEM_REF:
7547 struct mem_address addr;
7549 get_address_description (exp, &addr);
7550 op0 = addr_for_mem_ref (&addr, true);
7551 op0 = memory_address (mode, op0);
7552 temp = gen_rtx_MEM (mode, op0);
7553 set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
7560 tree array = TREE_OPERAND (exp, 0);
7561 tree index = TREE_OPERAND (exp, 1);
7563 /* Fold an expression like: "foo"[2].
7564 This is not done in fold so it won't happen inside &.
7565 Don't fold if this is for wide characters since it's too
7566 difficult to do correctly and this is a very rare case. */
7568 if (modifier != EXPAND_CONST_ADDRESS
7569 && modifier != EXPAND_INITIALIZER
7570 && modifier != EXPAND_MEMORY)
7572 tree t = fold_read_from_constant_string (exp);
7575 return expand_expr (t, target, tmode, modifier);
7578 /* If this is a constant index into a constant array,
7579 just get the value from the array. Handle both the cases when
7580 we have an explicit constructor and when our operand is a variable
7581 that was declared const. */
7583 if (modifier != EXPAND_CONST_ADDRESS
7584 && modifier != EXPAND_INITIALIZER
7585 && modifier != EXPAND_MEMORY
7586 && TREE_CODE (array) == CONSTRUCTOR
7587 && ! TREE_SIDE_EFFECTS (array)
7588 && TREE_CODE (index) == INTEGER_CST)
7590 unsigned HOST_WIDE_INT ix;
7593 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
7595 if (tree_int_cst_equal (field, index))
7597 if (!TREE_SIDE_EFFECTS (value))
7598 return expand_expr (fold (value), target, tmode, modifier);
7603 else if (optimize >= 1
7604 && modifier != EXPAND_CONST_ADDRESS
7605 && modifier != EXPAND_INITIALIZER
7606 && modifier != EXPAND_MEMORY
7607 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7608 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7609 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7610 && targetm.binds_local_p (array))
7612 if (TREE_CODE (index) == INTEGER_CST)
7614 tree init = DECL_INITIAL (array);
7616 if (TREE_CODE (init) == CONSTRUCTOR)
7618 unsigned HOST_WIDE_INT ix;
7621 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
7623 if (tree_int_cst_equal (field, index))
7625 if (TREE_SIDE_EFFECTS (value))
7628 if (TREE_CODE (value) == CONSTRUCTOR)
7630 /* If VALUE is a CONSTRUCTOR, this
7631 optimization is only useful if
7632 this doesn't store the CONSTRUCTOR
7633 into memory. If it does, it is more
7634 efficient to just load the data from
7635 the array directly. */
7636 rtx ret = expand_constructor (value, target,
7638 if (ret == NULL_RTX)
7642 return expand_expr (fold (value), target, tmode,
7646 else if (TREE_CODE (init) == STRING_CST)
7648 tree index1 = index;
7649 tree low_bound = array_ref_low_bound (exp);
7650 index1 = fold_convert (sizetype, TREE_OPERAND (exp, 1));
7652 /* Optimize the special case of a zero lower bound.
7654 We convert the low_bound to sizetype to avoid some problems
7655 with constant folding. (E.g. suppose the lower bound is 1,
7656 and its mode is QI.  Without the conversion, (ARRAY
7657 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7658 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
7660 if (! integer_zerop (low_bound))
7661 index1 = size_diffop (index1, fold_convert (sizetype,
7664 if (0 > compare_tree_int (index1,
7665 TREE_STRING_LENGTH (init)))
7667 tree type = TREE_TYPE (TREE_TYPE (init));
7668 enum machine_mode mode = TYPE_MODE (type);
7670 if (GET_MODE_CLASS (mode) == MODE_INT
7671 && GET_MODE_SIZE (mode) == 1)
7672 return gen_int_mode (TREE_STRING_POINTER (init)
7673 [TREE_INT_CST_LOW (index1)],
7680 goto normal_inner_ref;
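/* Illustrative example, not from the original sources: a use such as
   char c = "foo"[2];  reaches the STRING_CST arm above with
   INDEX1 == 2 and folds at compile time to the constant 'o', with no
   memory reference to the string.  */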
7683 /* If the operand is a CONSTRUCTOR, we can just extract the
7684 appropriate field if it is present. */
7685 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7687 unsigned HOST_WIDE_INT idx;
7690 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7692 if (field == TREE_OPERAND (exp, 1)
7693 /* We can normally use the value of the field in the
7694 CONSTRUCTOR. However, if this is a bitfield in
7695 an integral mode that we can fit in a HOST_WIDE_INT,
7696 we must mask only the number of bits in the bitfield,
7697 since this is done implicitly by the constructor. If
7698 the bitfield does not meet either of those conditions,
7699 we can't do this optimization. */
7700 && (! DECL_BIT_FIELD (field)
7701 || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
7702 && (GET_MODE_BITSIZE (DECL_MODE (field))
7703 <= HOST_BITS_PER_WIDE_INT))))
7705 if (DECL_BIT_FIELD (field)
7706 && modifier == EXPAND_STACK_PARM)
7708 op0 = expand_expr (value, target, tmode, modifier);
7709 if (DECL_BIT_FIELD (field))
7711 HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
7712 enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
7714 if (TYPE_UNSIGNED (TREE_TYPE (field)))
7716 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7717 op0 = expand_and (imode, op0, op1, target);
7722 = build_int_cst (NULL_TREE,
7723 GET_MODE_BITSIZE (imode) - bitsize);
7725 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7727 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7735 goto normal_inner_ref;
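/* Illustrative example, not from the original sources: given
   "struct { int f : 3; } x = { v };", reading x.f out of the
   CONSTRUCTOR masks with (1 << 3) - 1 when the field is unsigned, and
   otherwise shifts left and then arithmetically right by
   GET_MODE_BITSIZE (imode) - 3 to sign-extend.  */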
7738 case ARRAY_RANGE_REF:
7741 enum machine_mode mode1;
7742 HOST_WIDE_INT bitsize, bitpos;
7745 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7746 &mode1, &unsignedp, &volatilep, true);
7749 /* If we got back the original object, something is wrong. Perhaps
7750 we are evaluating an expression too early. In any event, don't
7751 infinitely recurse. */
7752 gcc_assert (tem != exp);
7754 /* If TEM's type is a union of variable size, pass TARGET to the inner
7755 computation, since it will need a temporary and TARGET is known
7756 to suffice.  This occurs in unchecked conversion in Ada.  */
7760 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7761 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7763 && modifier != EXPAND_STACK_PARM
7764 ? target : NULL_RTX),
7766 (modifier == EXPAND_INITIALIZER
7767 || modifier == EXPAND_CONST_ADDRESS
7768 || modifier == EXPAND_STACK_PARM)
7769 ? modifier : EXPAND_NORMAL);
7771 /* If this is a constant, put it into a register if it is a legitimate
7772 constant, OFFSET is 0, and we won't try to extract outside the
7773 register (in case we were passed a partially uninitialized object
7774 or a view_conversion to a larger size) or a BLKmode piece of it
7775 (e.g. if it is unchecked-converted to a record type in Ada). Force
7776 the constant to memory otherwise. */
7777 if (CONSTANT_P (op0))
7779 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7780 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7783 && bitpos + bitsize <= GET_MODE_BITSIZE (mode))
7784 op0 = force_reg (mode, op0);
7786 op0 = validize_mem (force_const_mem (mode, op0));
7789 /* Otherwise, if this object is not in memory and we either have an
7790 offset, a BLKmode result, or a reference outside the object, put it
7791 there. Such cases can occur in Ada if we have unchecked conversion
7792 of an expression from a scalar type to an array or record type or
7793 for an ARRAY_RANGE_REF whose type is BLKmode. */
7794 else if (!MEM_P (op0)
7797 || (bitpos + bitsize
7798 > GET_MODE_BITSIZE (GET_MODE (op0)))))
7800 tree nt = build_qualified_type (TREE_TYPE (tem),
7801 (TYPE_QUALS (TREE_TYPE (tem))
7802 | TYPE_QUAL_CONST));
7803 rtx memloc = assign_temp (nt, 1, 1, 1);
7805 emit_move_insn (memloc, op0);
7811 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7814 gcc_assert (MEM_P (op0));
7816 #ifdef POINTERS_EXTEND_UNSIGNED
7817 if (GET_MODE (offset_rtx) != Pmode)
7818 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7820 if (GET_MODE (offset_rtx) != ptr_mode)
7821 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7824 if (GET_MODE (op0) == BLKmode
7825 /* A constant address in OP0 can have VOIDmode, we must
7826 not try to call force_reg in that case. */
7827 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7829 && (bitpos % bitsize) == 0
7830 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7831 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7833 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7837 op0 = offset_address (op0, offset_rtx,
7838 highest_pow2_factor (offset));
7841 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7842 record its alignment as BIGGEST_ALIGNMENT. */
7843 if (MEM_P (op0) && bitpos == 0 && offset != 0
7844 && is_aligning_offset (offset, tem))
7845 set_mem_align (op0, BIGGEST_ALIGNMENT);
7847 /* Don't forget about volatility even if this is a bitfield. */
7848 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7850 if (op0 == orig_op0)
7851 op0 = copy_rtx (op0);
7853 MEM_VOLATILE_P (op0) = 1;
7856 /* The following code doesn't handle CONCAT.
7857 Assume only bitpos == 0 can be used for CONCAT, due to
7858 one-element arrays having the same mode as their element.  */
7859 if (GET_CODE (op0) == CONCAT)
7861 gcc_assert (bitpos == 0
7862 && bitsize == GET_MODE_BITSIZE (GET_MODE (op0)));
7866 /* In cases where an aligned union has an unaligned object
7867 as a field, we might be extracting a BLKmode value from
7868 an integer-mode (e.g., SImode) object. Handle this case
7869 by doing the extract into an object as wide as the field
7870 (which we know to be the width of a basic mode), then
7871 storing into memory, and changing the mode to BLKmode. */
7872 if (mode1 == VOIDmode
7873 || REG_P (op0) || GET_CODE (op0) == SUBREG
7874 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7875 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7876 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7877 && modifier != EXPAND_CONST_ADDRESS
7878 && modifier != EXPAND_INITIALIZER)
7879 /* If the field isn't aligned enough to fetch as a memref,
7880 fetch it as a bit field. */
7881 || (mode1 != BLKmode
7882 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7883 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7885 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7886 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7887 && ((modifier == EXPAND_CONST_ADDRESS
7888 || modifier == EXPAND_INITIALIZER)
7890 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7891 || (bitpos % BITS_PER_UNIT != 0)))
7892 /* If the type and the field are a constant size and the
7893 size of the type isn't the same size as the bitfield,
7894 we must use bitfield operations. */
7896 && TYPE_SIZE (TREE_TYPE (exp))
7897 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
7898 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7901 enum machine_mode ext_mode = mode;
7903 if (ext_mode == BLKmode
7904 && ! (target != 0 && MEM_P (op0)
7906 && bitpos % BITS_PER_UNIT == 0))
7907 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7909 if (ext_mode == BLKmode)
7912 target = assign_temp (type, 0, 1, 1);
7917 /* In this case, BITPOS must start at a byte boundary and
7918 TARGET, if specified, must be a MEM. */
7919 gcc_assert (MEM_P (op0)
7920 && (!target || MEM_P (target))
7921 && !(bitpos % BITS_PER_UNIT));
7923 emit_block_move (target,
7924 adjust_address (op0, VOIDmode,
7925 bitpos / BITS_PER_UNIT),
7926 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7928 (modifier == EXPAND_STACK_PARM
7929 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7934 op0 = validize_mem (op0);
7936 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7937 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7939 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7940 (modifier == EXPAND_STACK_PARM
7941 ? NULL_RTX : target),
7942 ext_mode, ext_mode);
7944 /* If the result is a record type and BITSIZE is narrower than
7945 the mode of OP0, an integral mode, and this is a big endian
7946 machine, we must put the field into the high-order bits. */
7947 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7948 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7949 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7950 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7951 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7955 /* If the result type is BLKmode, store the data into a temporary
7956 of the appropriate type, but with the mode corresponding to the
7957 mode for the data we have (op0's mode). It's tempting to make
7958 this a constant type, since we know it's only being stored once,
7959 but that can cause problems if we are taking the address of this
7960 COMPONENT_REF because the MEM of any reference via that address
7961 will have flags corresponding to the type, which will not
7962 necessarily be constant. */
7963 if (mode == BLKmode)
7965 HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
7968 /* If the reference doesn't use the alias set of its type,
7969 we cannot create the temporary using that type. */
7970 if (component_uses_parent_alias_set (exp))
7972 new_rtx = assign_stack_local (ext_mode, size, 0);
7973 set_mem_alias_set (new_rtx, get_alias_set (exp));
7976 new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
7978 emit_move_insn (new_rtx, op0);
7979 op0 = copy_rtx (new_rtx);
7980 PUT_MODE (op0, BLKmode);
7981 set_mem_attributes (op0, exp, 1);
7987 /* If the result is BLKmode, use that to access the object
7988 now as well.  */
7989 if (mode == BLKmode)
7992 /* Get a reference to just this component. */
7993 if (modifier == EXPAND_CONST_ADDRESS
7994 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7995 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7997 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7999 if (op0 == orig_op0)
8000 op0 = copy_rtx (op0);
8002 set_mem_attributes (op0, exp, 0);
8003 if (REG_P (XEXP (op0, 0)))
8004 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8006 MEM_VOLATILE_P (op0) |= volatilep;
8007 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
8008 || modifier == EXPAND_CONST_ADDRESS
8009 || modifier == EXPAND_INITIALIZER)
8011 else if (target == 0)
8012 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8014 convert_move (target, op0, unsignedp);
8019 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
8022 /* All valid uses of __builtin_va_arg_pack () are removed during
8023 inlining.  */
8024 if (CALL_EXPR_VA_ARG_PACK (exp))
8025 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
8027 tree fndecl = get_callee_fndecl (exp), attr;
8030 && (attr = lookup_attribute ("error",
8031 DECL_ATTRIBUTES (fndecl))) != NULL)
8032 error ("%Kcall to %qs declared with attribute error: %s",
8033 exp, lang_hooks.decl_printable_name (fndecl, 1),
8034 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8036 && (attr = lookup_attribute ("warning",
8037 DECL_ATTRIBUTES (fndecl))) != NULL)
8038 warning (0, "%Kcall to %qs declared with attribute warning: %s",
8039 exp, lang_hooks.decl_printable_name (fndecl, 1),
8040 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
8042 /* Check for a built-in function. */
8043 if (fndecl && DECL_BUILT_IN (fndecl))
8045 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_FRONTEND)
8046 return lang_hooks.expand_expr (exp, original_target,
8047 tmode, modifier, alt_rtl);
8049 return expand_builtin (exp, target, subtarget, tmode, ignore);
8052 return expand_call (exp, target, ignore);
8056 if (TREE_OPERAND (exp, 0) == error_mark_node)
8059 if (TREE_CODE (type) == UNION_TYPE)
8061 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
8063 /* If both input and output are BLKmode, this conversion isn't doing
8064 anything except possibly changing memory attribute. */
8065 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8067 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
8070 result = copy_rtx (result);
8071 set_mem_attributes (result, exp, 0);
8077 if (TYPE_MODE (type) != BLKmode)
8078 target = gen_reg_rtx (TYPE_MODE (type));
8080 target = assign_temp (type, 0, 1, 1);
8084 /* Store data into beginning of memory target. */
8085 store_expr (TREE_OPERAND (exp, 0),
8086 adjust_address (target, TYPE_MODE (valtype), 0),
8087 modifier == EXPAND_STACK_PARM,
8092 gcc_assert (REG_P (target));
8094 /* Store this field into a union of the proper type. */
8095 store_field (target,
8096 MIN ((int_size_in_bytes (TREE_TYPE
8097 (TREE_OPERAND (exp, 0)))
8099 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8100 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
8104 /* Return the entire union. */
8108 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8110 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
8113 /* If the signedness of the conversion differs and OP0 is
8114 a promoted SUBREG, clear that indication since we now
8115 have to do the proper extension. */
8116 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
8117 && GET_CODE (op0) == SUBREG)
8118 SUBREG_PROMOTED_VAR_P (op0) = 0;
8120 return REDUCE_BIT_FIELD (op0);
8123 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode,
8124 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8125 if (GET_MODE (op0) == mode)
8128 /* If OP0 is a constant, just convert it into the proper mode. */
8129 else if (CONSTANT_P (op0))
8131 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8132 enum machine_mode inner_mode = TYPE_MODE (inner_type);
8134 if (modifier == EXPAND_INITIALIZER)
8135 op0 = simplify_gen_subreg (mode, op0, inner_mode,
8136 subreg_lowpart_offset (mode,
8139 op0 = convert_modes (mode, inner_mode, op0,
8140 TYPE_UNSIGNED (inner_type));
8143 else if (modifier == EXPAND_INITIALIZER)
8144 op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8146 else if (target == 0)
8147 op0 = convert_to_mode (mode, op0,
8148 TYPE_UNSIGNED (TREE_TYPE
8149 (TREE_OPERAND (exp, 0))));
8152 convert_move (target, op0,
8153 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8157 return REDUCE_BIT_FIELD (op0);
8159 case VIEW_CONVERT_EXPR:
8162 /* If we are converting to BLKmode, try to avoid an intermediate
8163 temporary by fetching an inner memory reference. */
8165 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
8166 && TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != BLKmode
8167 && handled_component_p (TREE_OPERAND (exp, 0)))
8169 enum machine_mode mode1;
8170 HOST_WIDE_INT bitsize, bitpos;
8175 = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, &bitpos,
8176 &offset, &mode1, &unsignedp, &volatilep,
8180 /* ??? We should work harder and deal with non-zero offsets. */
8182 && (bitpos % BITS_PER_UNIT) == 0
8184 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
8186 /* See the normal_inner_ref case for the rationale. */
8189 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
8190 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
8192 && modifier != EXPAND_STACK_PARM
8193 ? target : NULL_RTX),
8195 (modifier == EXPAND_INITIALIZER
8196 || modifier == EXPAND_CONST_ADDRESS
8197 || modifier == EXPAND_STACK_PARM)
8198 ? modifier : EXPAND_NORMAL);
8200 if (MEM_P (orig_op0))
8204 /* Get a reference to just this component. */
8205 if (modifier == EXPAND_CONST_ADDRESS
8206 || modifier == EXPAND_SUM
8207 || modifier == EXPAND_INITIALIZER)
8208 op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
8210 op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
8212 if (op0 == orig_op0)
8213 op0 = copy_rtx (op0);
8215 set_mem_attributes (op0, TREE_OPERAND (exp, 0), 0);
8216 if (REG_P (XEXP (op0, 0)))
8217 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
8219 MEM_VOLATILE_P (op0) |= volatilep;
8225 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
8227 /* If the input and output modes are both the same, we are done. */
8228 if (mode == GET_MODE (op0))
8230 /* If neither mode is BLKmode, and both modes are the same size,
8231 then we can use gen_lowpart. */
8232 else if (mode != BLKmode && GET_MODE (op0) != BLKmode
8233 && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0)))
8235 if (GET_CODE (op0) == SUBREG)
8236 op0 = force_reg (GET_MODE (op0), op0);
8237 op0 = gen_lowpart (mode, op0);
8239 /* If both modes are integral, then we can convert from one to the
8240 other.  */
8241 else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
8242 op0 = convert_modes (mode, GET_MODE (op0), op0,
8243 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
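/* Illustrative example, not from the original sources: a
   VIEW_CONVERT_EXPR from float to a same-sized integer type is the
   bit-pattern reinterpretation spelled "memcpy (&i, &f, sizeof i)" in
   C; both modes have equal GET_MODE_SIZE, so it takes the gen_lowpart
   arm above with no conversion insns.  */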
8244 /* As a last resort, spill op0 to memory, and reload it in a different mode.  */
8246 else if (!MEM_P (op0))
8248 /* If the operand is not a MEM, force it into memory. Since we
8249 are going to be changing the mode of the MEM, don't call
8250 force_const_mem for constants because we don't allow pool
8251 constants to change mode. */
8252 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8254 gcc_assert (!TREE_ADDRESSABLE (exp));
8256 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
8258 target = assign_stack_temp_for_type
8259 (TYPE_MODE (inner_type),
8260 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
8262 emit_move_insn (target, op0);
8266 /* At this point, OP0 is in the correct mode. If the output type is
8267 such that the operand is known to be aligned, indicate that it is.
8268 Otherwise, we need only be concerned about alignment for non-BLKmode results.  */
8272 op0 = copy_rtx (op0);
8274 if (TYPE_ALIGN_OK (type))
8275 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
8276 else if (STRICT_ALIGNMENT
8278 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
8280 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8281 HOST_WIDE_INT temp_size
8282 = MAX (int_size_in_bytes (inner_type),
8283 (HOST_WIDE_INT) GET_MODE_SIZE (mode));
8285 rtx new_rtx = assign_stack_temp_for_type (mode, temp_size, 0, type);
8286 rtx new_with_op0_mode
8287 = adjust_address (new_rtx, GET_MODE (op0), 0);
8289 gcc_assert (!TREE_ADDRESSABLE (exp));
8291 if (GET_MODE (op0) == BLKmode)
8292 emit_block_move (new_with_op0_mode, op0,
8293 GEN_INT (GET_MODE_SIZE (mode)),
8294 (modifier == EXPAND_STACK_PARM
8295 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8297 emit_move_insn (new_with_op0_mode, op0);
8302 op0 = adjust_address (op0, mode, 0);
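/* Illustrative sketch, not taken from the original source: at the C level
   a VIEW_CONVERT_EXPR reinterprets the same bits in a different type of
   equal size, as in the classic pun below (assuming int and float are
   both 32 bits wide):

       unsigned int bits_of (float f)
       {
         union { float f; unsigned int u; } pun;
         pun.f = f;
         return pun.u;
       }

   The paths above either reuse OP0 in place (same mode, or gen_lowpart
   when the sizes match), convert between integral modes, or as a last
   resort spill OP0 to a stack temporary and reload it in the new mode.  */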
8307 case POINTER_PLUS_EXPR:
8308 /* Even though the sizetype mode and the pointer's mode can be different,
8309 expand is able to handle this correctly and get the correct result out
8310 of the PLUS_EXPR code. */
8313 /* Check if this is a case for multiplication and addition. */
8314 if ((TREE_CODE (type) == INTEGER_TYPE
8315 || TREE_CODE (type) == FIXED_POINT_TYPE)
8316 && TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
8318 tree subsubexp0, subsubexp1;
8319 enum tree_code code0, code1, this_code;
8321 subexp0 = TREE_OPERAND (exp, 0);
8322 subsubexp0 = TREE_OPERAND (subexp0, 0);
8323 subsubexp1 = TREE_OPERAND (subexp0, 1);
8324 code0 = TREE_CODE (subsubexp0);
8325 code1 = TREE_CODE (subsubexp1);
8326 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8327 : FIXED_CONVERT_EXPR;
8328 if (code0 == this_code && code1 == this_code
8329 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8330 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8331 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8332 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8333 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8334 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8336 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8337 enum machine_mode innermode = TYPE_MODE (op0type);
8338 bool zextend_p = TYPE_UNSIGNED (op0type);
8339 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
if (sat_p == 0)
8341 this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
else
8343 this_optab = zextend_p ? usmadd_widen_optab
8344 : ssmadd_widen_optab;
8345 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8346 && (optab_handler (this_optab, mode)->insn_code
8347 != CODE_FOR_nothing))
8349 expand_operands (TREE_OPERAND (subsubexp0, 0),
8350 TREE_OPERAND (subsubexp1, 0),
8351 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8352 op2 = expand_expr (TREE_OPERAND (exp, 1), subtarget,
8353 VOIDmode, EXPAND_NORMAL);
8354 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
target, unsignedp);
8357 return REDUCE_BIT_FIELD (temp);
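/* Illustrative sketch, hypothetical and not from the original source: the
   widening multiply-add case above corresponds to C such as

       long long mac (int a, int b, long long acc)
       {
         return (long long) a * (long long) b + acc;
       }

   assuming long long is exactly twice as wide as int, so the target's
   smadd_widen_optab (umadd_widen_optab for unsigned operands) can do the
   widening multiply and the addition in a single instruction.  */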
8362 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8363 something else, make sure we add the register to the constant and
8364 then to the other thing. This case can occur during strength
8365 reduction and doing it this way will produce better code if the
8366 frame pointer or argument pointer is eliminated.
8368 fold-const.c will ensure that the constant is always in the inner
8369 PLUS_EXPR, so the only case we need to do anything about is if
8370 sp, ap, or fp is our second argument, in which case we must swap
8371 the innermost first argument and our second argument. */
8373 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
8374 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
8375 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
8376 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
8377 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
8378 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
8380 tree t = TREE_OPERAND (exp, 1);
8382 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8383 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8386 /* If the result is to be ptr_mode and we are adding an integer to
8387 something, we might be forming a constant. So try to use
8388 plus_constant. If it produces a sum and we can't accept it,
8389 use force_operand. This allows P = &ARR[const] to generate
8390 efficient code on machines where a SYMBOL_REF is not a valid
8393 If this is an EXPAND_SUM call, always return the sum. */
8394 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8395 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8397 if (modifier == EXPAND_STACK_PARM)
8399 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8400 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8401 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8405 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8407 /* Use immed_double_const to ensure that the constant is
8408 truncated according to the mode of OP1, then sign extended
8409 to a HOST_WIDE_INT. Using the constant directly can result
8410 in non-canonical RTL in a 64x32 cross compile. */
8412 constant_part = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
(HOST_WIDE_INT) 0,
8414 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
8415 op1 = plus_constant (op1, INTVAL (constant_part));
8416 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8417 op1 = force_operand (op1, target);
8418 return REDUCE_BIT_FIELD (op1);
8421 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8422 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8423 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8427 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8428 (modifier == EXPAND_INITIALIZER
8429 ? EXPAND_INITIALIZER : EXPAND_SUM));
8430 if (! CONSTANT_P (op0))
8432 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8433 VOIDmode, modifier);
8434 /* Return a PLUS if modifier says it's OK. */
8435 if (modifier == EXPAND_SUM
8436 || modifier == EXPAND_INITIALIZER)
8437 return simplify_gen_binary (PLUS, mode, op0, op1);
8440 /* Use immed_double_const to ensure that the constant is
8441 truncated according to the mode of OP1, then sign extended
8442 to a HOST_WIDE_INT. Using the constant directly can result
8443 in non-canonical RTL in a 64x32 cross compile. */
8445 constant_part = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
(HOST_WIDE_INT) 0,
8447 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
8448 op0 = plus_constant (op0, INTVAL (constant_part));
8449 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8450 op0 = force_operand (op0, target);
8451 return REDUCE_BIT_FIELD (op0);
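/* Hypothetical illustration of the canonicalization issue noted above:
   on a 64-bit host compiling for a 32-bit target, the SImode constant
   0xffffffff must be represented as (const_int -1), i.e. truncated to
   32 bits and then sign-extended to the host HOST_WIDE_INT, never as
   (const_int 0xffffffff).  immed_double_const performs exactly that
   truncate-then-sign-extend step, keeping the RTL canonical.  */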
8455 /* No sense saving up arithmetic to be done
8456 if it's all in the wrong mode to form part of an address.
8457 And force_operand won't know whether to sign-extend or zero-extend.  */
8459 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8460 || mode != ptr_mode)
8462 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8463 subtarget, &op0, &op1, 0);
8464 if (op0 == const0_rtx)
8466 if (op1 == const0_rtx)
8471 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8472 subtarget, &op0, &op1, modifier);
8473 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8476 /* Check if this is a case for multiplication and subtraction. */
8477 if ((TREE_CODE (type) == INTEGER_TYPE
8478 || TREE_CODE (type) == FIXED_POINT_TYPE)
8479 && TREE_CODE (TREE_OPERAND (exp, 1)) == MULT_EXPR)
8481 tree subsubexp0, subsubexp1;
8482 enum tree_code code0, code1, this_code;
8484 subexp1 = TREE_OPERAND (exp, 1);
8485 subsubexp0 = TREE_OPERAND (subexp1, 0);
8486 subsubexp1 = TREE_OPERAND (subexp1, 1);
8487 code0 = TREE_CODE (subsubexp0);
8488 code1 = TREE_CODE (subsubexp1);
8489 this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
8490 : FIXED_CONVERT_EXPR;
8491 if (code0 == this_code && code1 == this_code
8492 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8493 < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
8494 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8495 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subsubexp1, 0))))
8496 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp0, 0)))
8497 == TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subsubexp1, 0)))))
8499 tree op0type = TREE_TYPE (TREE_OPERAND (subsubexp0, 0));
8500 enum machine_mode innermode = TYPE_MODE (op0type);
8501 bool zextend_p = TYPE_UNSIGNED (op0type);
8502 bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
if (sat_p == 0)
8504 this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
else
8506 this_optab = zextend_p ? usmsub_widen_optab
8507 : ssmsub_widen_optab;
8508 if (mode == GET_MODE_2XWIDER_MODE (innermode)
8509 && (optab_handler (this_optab, mode)->insn_code
8510 != CODE_FOR_nothing))
8512 expand_operands (TREE_OPERAND (subsubexp0, 0),
8513 TREE_OPERAND (subsubexp1, 0),
8514 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8515 op2 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8516 VOIDmode, EXPAND_NORMAL);
8517 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
target, unsignedp);
8520 return REDUCE_BIT_FIELD (temp);
8525 /* For initializers, we are allowed to return a MINUS of two
8526 symbolic constants. Here we handle all cases when both operands
8528 /* Handle difference of two symbolic constants,
8529 for the sake of an initializer. */
8530 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8531 && really_constant_p (TREE_OPERAND (exp, 0))
8532 && really_constant_p (TREE_OPERAND (exp, 1)))
8534 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8535 NULL_RTX, &op0, &op1, modifier);
8537 /* If the last operand is a CONST_INT, use plus_constant of
8538 the negated constant. Else make the MINUS. */
8539 if (GET_CODE (op1) == CONST_INT)
8540 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
8542 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8545 /* No sense saving up arithmetic to be done
8546 if it's all in the wrong mode to form part of an address.
8547 And force_operand won't know whether to sign-extend or zero-extend.  */
8549 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8550 || mode != ptr_mode)
8553 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8554 subtarget, &op0, &op1, modifier);
8556 /* Convert A - const to A + (-const). */
8557 if (GET_CODE (op1) == CONST_INT)
8559 op1 = negate_rtx (mode, op1);
8560 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
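/* Sketch of the rewrite above with hypothetical values: a MINUS whose
   second operand is a CONST_INT becomes canonical PLUS form,

       x - 4   ==>   x + (-4)

   so later passes see a single addition shape and plus_constant can
   fold chains of such offsets together.  */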
8566 /* If this is a fixed-point operation, then we cannot use the code
8567 below because "expand_mult" doesn't support sat/no-sat fixed-point multiplications.  */
8569 if (ALL_FIXED_POINT_MODE_P (mode))
8572 /* If first operand is constant, swap them.
8573 Thus the following special case checks need only
8574 check the second operand. */
8575 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8577 tree t1 = TREE_OPERAND (exp, 0);
8578 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8579 TREE_OPERAND (exp, 1) = t1;
8582 /* Attempt to return something suitable for generating an
8583 indexed address, for machines that support that. */
8585 if (modifier == EXPAND_SUM && mode == ptr_mode
8586 && host_integerp (TREE_OPERAND (exp, 1), 0))
8588 tree exp1 = TREE_OPERAND (exp, 1);
8590 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8594 op0 = force_operand (op0, NULL_RTX);
8596 op0 = copy_to_mode_reg (mode, op0);
8598 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8599 gen_int_mode (tree_low_cst (exp1, 0),
8600 TYPE_MODE (TREE_TYPE (exp1)))));
8603 if (modifier == EXPAND_STACK_PARM)
8606 /* Check for multiplying things that have been extended
8607 from a narrower type. If this machine supports multiplying
8608 in that narrower type with a result in the desired type,
8609 do it that way, and avoid the explicit type-conversion. */
8611 subexp0 = TREE_OPERAND (exp, 0);
8612 subexp1 = TREE_OPERAND (exp, 1);
8613 /* First, check if we have a multiplication of one signed and one
8614 unsigned operand. */
8615 if (TREE_CODE (subexp0) == NOP_EXPR
8616 && TREE_CODE (subexp1) == NOP_EXPR
8617 && TREE_CODE (type) == INTEGER_TYPE
8618 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8619 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8620 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8621 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (subexp1, 0))))
8622 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0)))
8623 != TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp1, 0)))))
8625 enum machine_mode innermode
8626 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (subexp0, 0)));
8627 this_optab = usmul_widen_optab;
8628 if (mode == GET_MODE_WIDER_MODE (innermode))
8630 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8632 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (subexp0, 0))))
8633 expand_operands (TREE_OPERAND (subexp0, 0),
8634 TREE_OPERAND (subexp1, 0),
8635 NULL_RTX, &op0, &op1, 0);
8637 expand_operands (TREE_OPERAND (subexp0, 0),
8638 TREE_OPERAND (subexp1, 0),
8639 NULL_RTX, &op1, &op0, 0);
8645 /* Check for a multiplication with matching signedness. */
8646 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8647 && TREE_CODE (type) == INTEGER_TYPE
8648 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8649 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8650 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8651 && int_fits_type_p (TREE_OPERAND (exp, 1),
8652 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8653 /* Don't use a widening multiply if a shift will do. */
8654 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8655 > HOST_BITS_PER_WIDE_INT)
8656 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8658 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8659 && (TYPE_PRECISION (TREE_TYPE
8660 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8661 == TYPE_PRECISION (TREE_TYPE
8663 (TREE_OPERAND (exp, 0), 0))))
8664 /* If both operands are extended, they must either both
8665 be zero-extended or both be sign-extended. */
8666 && (TYPE_UNSIGNED (TREE_TYPE
8667 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8668 == TYPE_UNSIGNED (TREE_TYPE
8670 (TREE_OPERAND (exp, 0), 0)))))))
8672 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8673 enum machine_mode innermode = TYPE_MODE (op0type);
8674 bool zextend_p = TYPE_UNSIGNED (op0type);
8675 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8676 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8678 if (mode == GET_MODE_2XWIDER_MODE (innermode))
8680 if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
8682 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8683 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8684 TREE_OPERAND (exp, 1),
8685 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8687 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8688 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8689 NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8692 else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
8693 && innermode == word_mode)
8696 op0 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8697 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8698 op1 = convert_modes (innermode, mode,
8699 expand_normal (TREE_OPERAND (exp, 1)),
8702 op1 = expand_normal (TREE_OPERAND (TREE_OPERAND (exp, 1), 0));
8703 temp = expand_binop (mode, other_optab, op0, op1, target,
8704 unsignedp, OPTAB_LIB_WIDEN);
8705 hipart = gen_highpart (innermode, temp);
8706 htem = expand_mult_highpart_adjust (innermode, hipart,
8710 emit_move_insn (hipart, htem);
8711 return REDUCE_BIT_FIELD (temp);
8715 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8716 subtarget, &op0, &op1, 0);
8717 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
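/* Illustrative sketch, not from the original source: the widening
   multiply cases above fire for C such as

       long long wide (int a, int b)
       {
         return (long long) a * (long long) b;
       }

   assuming long long is twice as wide as int.  When only the optab of
   the opposite signedness exists at word_mode, the code instead
   multiplies in the wide mode and repairs the high part with
   expand_mult_highpart_adjust, as seen a few lines up.  */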
8719 case TRUNC_DIV_EXPR:
8720 case FLOOR_DIV_EXPR:
8722 case ROUND_DIV_EXPR:
8723 case EXACT_DIV_EXPR:
8724 /* If this is a fixed-point operation, then we cannot use the code
8725 below because "expand_divmod" doesn't support sat/no-sat fixed-point divisions.  */
8727 if (ALL_FIXED_POINT_MODE_P (mode))
8730 if (modifier == EXPAND_STACK_PARM)
8732 /* Possible optimization: compute the dividend with EXPAND_SUM
8733 then if the divisor is constant can optimize the case
8734 where some terms of the dividend have coeffs divisible by it. */
8735 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8736 subtarget, &op0, &op1, 0);
8737 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8742 case TRUNC_MOD_EXPR:
8743 case FLOOR_MOD_EXPR:
8745 case ROUND_MOD_EXPR:
8746 if (modifier == EXPAND_STACK_PARM)
8748 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8749 subtarget, &op0, &op1, 0);
8750 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8752 case FIXED_CONVERT_EXPR:
8753 op0 = expand_normal (TREE_OPERAND (exp, 0));
8754 if (target == 0 || modifier == EXPAND_STACK_PARM)
8755 target = gen_reg_rtx (mode);
8757 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == INTEGER_TYPE
8758 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
8759 || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8760 expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8762 expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8765 case FIX_TRUNC_EXPR:
8766 op0 = expand_normal (TREE_OPERAND (exp, 0));
8767 if (target == 0 || modifier == EXPAND_STACK_PARM)
8768 target = gen_reg_rtx (mode);
8769 expand_fix (target, op0, unsignedp);
8773 op0 = expand_normal (TREE_OPERAND (exp, 0));
8774 if (target == 0 || modifier == EXPAND_STACK_PARM)
8775 target = gen_reg_rtx (mode);
8776 /* expand_float can't figure out what to do if FROM has VOIDmode.
8777 So give it the correct mode. With -O, cse will optimize this. */
8778 if (GET_MODE (op0) == VOIDmode)
8779 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8781 expand_float (target, op0,
8782 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8786 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8787 VOIDmode, EXPAND_NORMAL);
8788 if (modifier == EXPAND_STACK_PARM)
8790 temp = expand_unop (mode,
8791 optab_for_tree_code (NEGATE_EXPR, type, optab_default),
op0, target, 0);
8795 return REDUCE_BIT_FIELD (temp);
8798 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8799 VOIDmode, EXPAND_NORMAL);
8800 if (modifier == EXPAND_STACK_PARM)
8803 /* ABS_EXPR is not valid for complex arguments. */
8804 gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8805 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8807 /* Unsigned abs is simply the operand. Testing here means we don't
8808 risk generating incorrect code below. */
8809 if (TYPE_UNSIGNED (type))
8812 return expand_abs (mode, op0, target, unsignedp,
8813 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8817 target = original_target;
8819 || modifier == EXPAND_STACK_PARM
8820 || (MEM_P (target) && MEM_VOLATILE_P (target))
8821 || GET_MODE (target) != mode
8823 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8824 target = gen_reg_rtx (mode);
8825 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8826 target, &op0, &op1, 0);
8828 /* First try to do it with a special MIN or MAX instruction.
8829 If that does not win, use a conditional jump to select the proper value.  */
8831 this_optab = optab_for_tree_code (code, type, optab_default);
8832 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8837 /* At this point, a MEM target is no longer useful; we will get better code without it.  */
8840 if (! REG_P (target))
8841 target = gen_reg_rtx (mode);
8843 /* If op1 was placed in target, swap op0 and op1. */
8844 if (target != op0 && target == op1)
8851 /* We generate better code and avoid problems with op1 mentioning
8852 target by forcing op1 into a pseudo if it isn't a constant. */
8853 if (! CONSTANT_P (op1))
8854 op1 = force_reg (mode, op1);
8857 enum rtx_code comparison_code;
8860 if (code == MAX_EXPR)
8861 comparison_code = unsignedp ? GEU : GE;
8863 comparison_code = unsignedp ? LEU : LE;
8865 /* Canonicalize to comparisons against 0. */
8866 if (op1 == const1_rtx)
8868 /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8869 or (a != 0 ? a : 1) for unsigned.
8870 For MIN we are safe converting (a <= 1 ? a : 1)
8871 into (a <= 0 ? a : 1) */
8872 cmpop1 = const0_rtx;
8873 if (code == MAX_EXPR)
8874 comparison_code = unsignedp ? NE : GT;
8876 if (op1 == constm1_rtx && !unsignedp)
8878 /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8879 and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8880 cmpop1 = const0_rtx;
8881 if (code == MIN_EXPR)
8882 comparison_code = LT;
8884 #ifdef HAVE_conditional_move
8885 /* Use a conditional move if possible. */
8886 if (can_conditionally_move_p (mode))
8890 /* ??? Same problem as in expmed.c: emit_conditional_move
8891 forces a stack adjustment via compare_from_rtx, and we
8892 lose the stack adjustment if the sequence we are about
8893 to create is discarded. */
8894 do_pending_stack_adjust ();
8898 /* Try to emit the conditional move. */
8899 insn = emit_conditional_move (target, comparison_code,
8904 /* If we could do the conditional move, emit the sequence, and return.  */
8908 rtx seq = get_insns ();
8914 /* Otherwise discard the sequence and fall back to code with a branch.  */
8920 emit_move_insn (target, op0);
8922 temp = gen_label_rtx ();
8923 do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8924 unsignedp, mode, NULL_RTX, NULL_RTX, temp);
8926 emit_move_insn (target, op1);
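/* Shape of the branching fallback emitted just above, sketched with
   hypothetical operands for MAX_EXPR:

       target = op0;
       if (target >= op1) goto done;
       target = op1;
     done:

   i.e. a hand-made conditional move used when neither a min/max insn
   nor a conditional-move pattern is available.  */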
8931 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8932 VOIDmode, EXPAND_NORMAL);
8933 if (modifier == EXPAND_STACK_PARM)
8935 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8939 /* ??? Can optimize bitwise operations with one arg constant.
8940 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8941 and (a bitwise1 b) bitwise2 b (etc)
8942 but that is probably not worthwhile.  */
8944 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8945 boolean values when we want in all cases to compute both of them. In
8946 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8947 as actual zero-or-1 values and then bitwise anding. In cases where
8948 there cannot be any side effects, better code would be made by
8949 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8950 how to recognize those cases. */
8952 case TRUTH_AND_EXPR:
8953 code = BIT_AND_EXPR;
8958 code = BIT_IOR_EXPR;
8962 case TRUTH_XOR_EXPR:
8963 code = BIT_XOR_EXPR;
8969 gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8970 || (GET_MODE_PRECISION (TYPE_MODE (type))
8971 == TYPE_PRECISION (type)));
8976 /* If this is a fixed-point operation, then we cannot use the code
8977 below because "expand_shift" doesn't support sat/no-sat fixed-point shifts.  */
8979 if (ALL_FIXED_POINT_MODE_P (mode))
8982 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8984 if (modifier == EXPAND_STACK_PARM)
8986 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
8987 VOIDmode, EXPAND_NORMAL);
8988 temp = expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8990 if (code == LSHIFT_EXPR)
8991 temp = REDUCE_BIT_FIELD (temp);
8994 /* Could determine the answer when only additive constants differ. Also,
8995 the addition of one can be handled by changing the condition. */
9002 case UNORDERED_EXPR:
9010 temp = do_store_flag (exp,
9011 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9012 tmode != VOIDmode ? tmode : mode, 0);
9016 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
9017 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
9019 && REG_P (original_target)
9020 && (GET_MODE (original_target)
9021 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9023 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
9024 VOIDmode, EXPAND_NORMAL);
9026 /* If temp is constant, we can just compute the result. */
9027 if (GET_CODE (temp) == CONST_INT)
9029 if (INTVAL (temp) != 0)
9030 emit_move_insn (target, const1_rtx);
9032 emit_move_insn (target, const0_rtx);
9037 if (temp != original_target)
9039 enum machine_mode mode1 = GET_MODE (temp);
9040 if (mode1 == VOIDmode)
9041 mode1 = tmode != VOIDmode ? tmode : mode;
9043 temp = copy_to_mode_reg (mode1, temp);
9046 op1 = gen_label_rtx ();
9047 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
9048 GET_MODE (temp), unsignedp, op1);
9049 emit_move_insn (temp, const1_rtx);
9054 /* If no set-flag instruction, must generate a conditional store
9055 into a temporary variable. Drop through and handle this like && and ||.  */
9057 /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9058 are occasionally created by folding during expansion. */
9059 case TRUTH_ANDIF_EXPR:
9060 case TRUTH_ORIF_EXPR:
9063 || modifier == EXPAND_STACK_PARM
9064 || ! safe_from_p (target, exp, 1)
9065 /* Make sure we don't have a hard reg (such as function's return
9066 value) live across basic blocks, if not optimizing. */
9067 || (!optimize && REG_P (target)
9068 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9069 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9072 emit_move_insn (target, const0_rtx);
9074 op1 = gen_label_rtx ();
9075 jumpifnot (exp, op1);
9078 emit_move_insn (target, const1_rtx);
9081 return ignore ? const0_rtx : target;
9083 case TRUTH_NOT_EXPR:
9084 if (modifier == EXPAND_STACK_PARM)
9086 op0 = expand_expr (TREE_OPERAND (exp, 0), target,
9087 VOIDmode, EXPAND_NORMAL);
9088 /* The parser is careful to generate TRUTH_NOT_EXPR
9089 only with operands that are always zero or one. */
9090 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
9091 target, 1, OPTAB_LIB_WIDEN);
9095 case STATEMENT_LIST:
9097 tree_stmt_iterator iter;
9099 gcc_assert (ignore);
9101 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9102 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9107 /* A COND_EXPR with its type being VOID_TYPE represents a
9108 conditional jump and is handled in
9109 expand_gimple_cond_expr. */
9110 gcc_assert (!VOID_TYPE_P (TREE_TYPE (exp)));
9112 /* Note that COND_EXPRs whose type is a structure or union
9113 are required to be constructed to contain assignments of
9114 a temporary variable, so that we can evaluate them here
9115 for side effect only. If type is void, we must do likewise. */
9117 gcc_assert (!TREE_ADDRESSABLE (type)
9119 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node
9120 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node);
9122 /* If we are not to produce a result, we have no target. Otherwise,
9123 if a target was specified use it; it will not be used as an
9124 intermediate target unless it is safe. If no target, use a temporary.  */
9127 if (modifier != EXPAND_STACK_PARM
9129 && safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
9130 && GET_MODE (original_target) == mode
9131 #ifdef HAVE_conditional_move
9132 && (! can_conditionally_move_p (mode)
9133 || REG_P (original_target))
9135 && !MEM_P (original_target))
9136 temp = original_target;
9138 temp = assign_temp (type, 0, 0, 1);
9140 do_pending_stack_adjust ();
9142 op0 = gen_label_rtx ();
9143 op1 = gen_label_rtx ();
9144 jumpifnot (TREE_OPERAND (exp, 0), op0);
9145 store_expr (TREE_OPERAND (exp, 1), temp,
9146 modifier == EXPAND_STACK_PARM,
9149 emit_jump_insn (gen_jump (op1));
9152 store_expr (TREE_OPERAND (exp, 2), temp,
9153 modifier == EXPAND_STACK_PARM,
9161 target = expand_vec_cond_expr (exp, target);
9166 tree lhs = TREE_OPERAND (exp, 0);
9167 tree rhs = TREE_OPERAND (exp, 1);
9168 gcc_assert (ignore);
9170 /* Check for |= or &= of a bitfield of size one into another bitfield
9171 of size 1. In this case, (unless we need the result of the
9172 assignment) we can do this more efficiently with a
9173 test followed by an assignment, if necessary.
9175 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9176 things change so we do, this code should be enhanced to support it.  */
9178 if (TREE_CODE (lhs) == COMPONENT_REF
9179 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9180 || TREE_CODE (rhs) == BIT_AND_EXPR)
9181 && TREE_OPERAND (rhs, 0) == lhs
9182 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9183 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9184 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9186 rtx label = gen_label_rtx ();
9187 int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9188 do_jump (TREE_OPERAND (rhs, 1),
9191 expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9192 MOVE_NONTEMPORAL (exp));
9193 do_pending_stack_adjust ();
9198 expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9203 if (!TREE_OPERAND (exp, 0))
9204 expand_null_return ();
9206 expand_return (TREE_OPERAND (exp, 0));
9210 return expand_expr_addr_expr (exp, target, tmode, modifier);
9213 /* Get the rtx code of the operands. */
9214 op0 = expand_normal (TREE_OPERAND (exp, 0));
9215 op1 = expand_normal (TREE_OPERAND (exp, 1));
9218 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9220 /* Move the real (op0) and imaginary (op1) parts to their location. */
9221 write_complex_part (target, op0, false);
9222 write_complex_part (target, op1, true);
9227 op0 = expand_normal (TREE_OPERAND (exp, 0));
9228 return read_complex_part (op0, false);
9231 op0 = expand_normal (TREE_OPERAND (exp, 0));
9232 return read_complex_part (op0, true);
9235 expand_resx_expr (exp);
9238 case TRY_CATCH_EXPR:
9240 case EH_FILTER_EXPR:
9241 case TRY_FINALLY_EXPR:
9242 /* Lowered by tree-eh.c. */
9245 case WITH_CLEANUP_EXPR:
9246 case CLEANUP_POINT_EXPR:
9248 case CASE_LABEL_EXPR:
9254 case PREINCREMENT_EXPR:
9255 case PREDECREMENT_EXPR:
9256 case POSTINCREMENT_EXPR:
9257 case POSTDECREMENT_EXPR:
9260 /* Lowered by gimplify.c. */
9263 case CHANGE_DYNAMIC_TYPE_EXPR:
9264 /* This is ignored at the RTL level. The tree level set
9265 DECL_POINTER_ALIAS_SET of any variable to be 0, which is
9266 overkill for the RTL layer but is all that we can easily do.  */
9271 return get_exception_pointer ();
9274 return get_exception_filter ();
9277 /* Function descriptors are not valid except for as
9278 initialization constants, and should not be expanded. */
9286 expand_label (TREE_OPERAND (exp, 0));
9290 expand_asm_expr (exp);
9293 case WITH_SIZE_EXPR:
9294 /* WITH_SIZE_EXPR expands to its first argument. The caller should
9295 have pulled out the size to use in whatever context it needed. */
9296 return expand_expr_real (TREE_OPERAND (exp, 0), original_target, tmode,
9299 case REALIGN_LOAD_EXPR:
9301 tree oprnd0 = TREE_OPERAND (exp, 0);
9302 tree oprnd1 = TREE_OPERAND (exp, 1);
9303 tree oprnd2 = TREE_OPERAND (exp, 2);
9306 this_optab = optab_for_tree_code (code, type, optab_default);
9307 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9308 op2 = expand_normal (oprnd2);
9309 temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
target, unsignedp);
9317 tree oprnd0 = TREE_OPERAND (exp, 0);
9318 tree oprnd1 = TREE_OPERAND (exp, 1);
9319 tree oprnd2 = TREE_OPERAND (exp, 2);
9322 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9323 op2 = expand_normal (oprnd2);
9324 target = expand_widen_pattern_expr (exp, op0, op1, op2,
9329 case WIDEN_SUM_EXPR:
9331 tree oprnd0 = TREE_OPERAND (exp, 0);
9332 tree oprnd1 = TREE_OPERAND (exp, 1);
9334 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9335 target = expand_widen_pattern_expr (exp, op0, NULL_RTX, op1,
9340 case REDUC_MAX_EXPR:
9341 case REDUC_MIN_EXPR:
9342 case REDUC_PLUS_EXPR:
9344 op0 = expand_normal (TREE_OPERAND (exp, 0));
9345 this_optab = optab_for_tree_code (code, type, optab_default);
9346 temp = expand_unop (mode, this_optab, op0, target, unsignedp);
9351 case VEC_EXTRACT_EVEN_EXPR:
9352 case VEC_EXTRACT_ODD_EXPR:
9354 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9355 NULL_RTX, &op0, &op1, 0);
9356 this_optab = optab_for_tree_code (code, type, optab_default);
9357 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9363 case VEC_INTERLEAVE_HIGH_EXPR:
9364 case VEC_INTERLEAVE_LOW_EXPR:
9366 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9367 NULL_RTX, &op0, &op1, 0);
9368 this_optab = optab_for_tree_code (code, type, optab_default);
9369 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9375 case VEC_LSHIFT_EXPR:
9376 case VEC_RSHIFT_EXPR:
9378 target = expand_vec_shift_expr (exp, target);
9382 case VEC_UNPACK_HI_EXPR:
9383 case VEC_UNPACK_LO_EXPR:
9385 op0 = expand_normal (TREE_OPERAND (exp, 0));
9386 this_optab = optab_for_tree_code (code, type, optab_default);
9387 temp = expand_widen_pattern_expr (exp, op0, NULL_RTX, NULL_RTX,
9393 case VEC_UNPACK_FLOAT_HI_EXPR:
9394 case VEC_UNPACK_FLOAT_LO_EXPR:
9396 op0 = expand_normal (TREE_OPERAND (exp, 0));
9397 /* The signedness is determined from the input operand. */
9398 this_optab = optab_for_tree_code (code,
9399 TREE_TYPE (TREE_OPERAND (exp, 0)),
9401 temp = expand_widen_pattern_expr
9402 (exp, op0, NULL_RTX, NULL_RTX,
9403 target, TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
9409 case VEC_WIDEN_MULT_HI_EXPR:
9410 case VEC_WIDEN_MULT_LO_EXPR:
9412 tree oprnd0 = TREE_OPERAND (exp, 0);
9413 tree oprnd1 = TREE_OPERAND (exp, 1);
9415 expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, 0);
9416 target = expand_widen_pattern_expr (exp, op0, op1, NULL_RTX,
9418 gcc_assert (target);
9422 case VEC_PACK_TRUNC_EXPR:
9423 case VEC_PACK_SAT_EXPR:
9424 case VEC_PACK_FIX_TRUNC_EXPR:
9425 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9429 return lang_hooks.expand_expr (exp, original_target, tmode,
9433 /* Here to do an ordinary binary operator. */
9435 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9436 subtarget, &op0, &op1, 0);
9438 this_optab = optab_for_tree_code (code, type, optab_default);
9440 if (modifier == EXPAND_STACK_PARM)
9442 temp = expand_binop (mode, this_optab, op0, op1, target,
9443 unsignedp, OPTAB_LIB_WIDEN);
9445 return REDUCE_BIT_FIELD (temp);
9447 #undef REDUCE_BIT_FIELD
9449 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9450 signedness of TYPE), possibly returning the result in TARGET. */
9452 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9454 HOST_WIDE_INT prec = TYPE_PRECISION (type);
9455 if (target && GET_MODE (target) != GET_MODE (exp))
target = 0;
9457 /* For constant values, reduce using build_int_cst_type. */
9458 if (GET_CODE (exp) == CONST_INT)
9460 HOST_WIDE_INT value = INTVAL (exp);
9461 tree t = build_int_cst_type (type, value);
9462 return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9464 else if (TYPE_UNSIGNED (type))
9467 if (prec < HOST_BITS_PER_WIDE_INT)
9468 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
GET_MODE (exp));
else
9471 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9472 ((unsigned HOST_WIDE_INT) 1
9473 << (prec - HOST_BITS_PER_WIDE_INT)) - 1, GET_MODE (exp));
9475 return expand_and (GET_MODE (exp), exp, mask, target);
9479 tree count = build_int_cst (NULL_TREE,
9480 GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9481 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9482 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
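/* Worked example with hypothetical values: reducing to an unsigned
   5-bit field in a 32-bit mode masks with (1 << 5) - 1 = 0x1f, so 37
   (binary 100101) reduces to 5.  For a signed 5-bit field the value is
   shifted left by 32 - 5 = 27 bits and arithmetically shifted back, so
   0x1f reduces to -1 as the signed interpretation requires.  */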
9486 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9487 when applied to the address of EXP produces an address known to be
9488 aligned more than BIGGEST_ALIGNMENT. */
9491 is_aligning_offset (const_tree offset, const_tree exp)
9493 /* Strip off any conversions. */
9494 while (CONVERT_EXPR_P (offset))
9495 offset = TREE_OPERAND (offset, 0);
9497 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9498 a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9499 if (TREE_CODE (offset) != BIT_AND_EXPR
9500 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9501 || compare_tree_int (TREE_OPERAND (offset, 1),
9502 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9503 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9506 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9507 It must be NEGATE_EXPR. Then strip any more conversions. */
9508 offset = TREE_OPERAND (offset, 0);
9509 while (CONVERT_EXPR_P (offset))
9510 offset = TREE_OPERAND (offset, 0);
9512 if (TREE_CODE (offset) != NEGATE_EXPR)
9515 offset = TREE_OPERAND (offset, 0);
9516 while (CONVERT_EXPR_P (offset))
9517 offset = TREE_OPERAND (offset, 0);
9519 /* This must now be the address of EXP. */
9520 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
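/* Hypothetical example of an offset this function accepts: code that
   rounds an address up to a 64-byte boundary computes

       (- (intptr_t) &buf) & (64 - 1)

   i.e. a BIT_AND_EXPR of a NEGATE_EXPR of EXP's address with the
   constant 63, possibly wrapped in conversions.  Adding that offset to
   &buf yields an address aligned to 64 bytes, which is why the caller
   may then assume alignment beyond BIGGEST_ALIGNMENT.  */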
9523 /* Return the tree node if an ARG corresponds to a string constant or zero
9524 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9525 in bytes within the string that ARG is accessing. The type of the
9526 offset will be `sizetype'. */
9529 string_constant (tree arg, tree *ptr_offset)
9531 tree array, offset, lower_bound;
9534 if (TREE_CODE (arg) == ADDR_EXPR)
9536 if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9538 *ptr_offset = size_zero_node;
9539 return TREE_OPERAND (arg, 0);
9541 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9543 array = TREE_OPERAND (arg, 0);
9544 offset = size_zero_node;
9546 else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9548 array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9549 offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9550 if (TREE_CODE (array) != STRING_CST
9551 && TREE_CODE (array) != VAR_DECL)
9554 /* Check if the array has a nonzero lower bound. */
9555 lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9556 if (!integer_zerop (lower_bound))
9558 /* If the offset and base aren't both constants, return 0. */
9559 if (TREE_CODE (lower_bound) != INTEGER_CST)
9561 if (TREE_CODE (offset) != INTEGER_CST)
9563 /* Adjust offset by the lower bound. */
9564 offset = size_diffop (fold_convert (sizetype, offset),
9565 fold_convert (sizetype, lower_bound));
9571 else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9573 tree arg0 = TREE_OPERAND (arg, 0);
9574 tree arg1 = TREE_OPERAND (arg, 1);
9579 if (TREE_CODE (arg0) == ADDR_EXPR
9580 && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9581 || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9583 array = TREE_OPERAND (arg0, 0);
9586 else if (TREE_CODE (arg1) == ADDR_EXPR
9587 && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9588 || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9590 array = TREE_OPERAND (arg1, 0);
9599 if (TREE_CODE (array) == STRING_CST)
9601 *ptr_offset = fold_convert (sizetype, offset);
9604 else if (TREE_CODE (array) == VAR_DECL)
9608 /* Variables initialized to string literals can be handled too. */
9609 if (DECL_INITIAL (array) == NULL_TREE
9610 || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9613 /* If they are read-only, non-volatile and bind locally. */
9614 if (! TREE_READONLY (array)
9615 || TREE_SIDE_EFFECTS (array)
9616 || ! targetm.binds_local_p (array))
9619 /* Avoid const char foo[4] = "abcde"; */
9620 if (DECL_SIZE_UNIT (array) == NULL_TREE
9621 || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9622 || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9623 || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9626 /* If variable is bigger than the string literal, OFFSET must be constant
9627 and inside of the bounds of the string literal. */
9628 offset = fold_convert (sizetype, offset);
9629 if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
9630 && (! host_integerp (offset, 1)
9631 || compare_tree_int (offset, length) >= 0))
9634 *ptr_offset = offset;
9635 return DECL_INITIAL (array);
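/* Hypothetical inputs this function resolves: given a local definition

       static const char buf[6] = "hello";

   the argument &buf[2] (an ADDR_EXPR of an ARRAY_REF, or equivalently
   an ADDR_EXPR plus an offset) yields the STRING_CST "hello" with
   *PTR_OFFSET set to 2, provided the array is read-only, binds
   locally, and the offset stays within the initializer.  */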
9641 /* Generate code to calculate EXP using a store-flag instruction
9642 and return an rtx for the result. EXP is either a comparison
9643 or a TRUTH_NOT_EXPR whose operand is a comparison.
9645 If TARGET is nonzero, store the result there if convenient.
9647 If ONLY_CHEAP is nonzero, only do this if it is likely to be very cheap.
9650 Return zero if there is no suitable set-flag instruction
9651 available on this machine.
9653 Once expand_expr has been called on the arguments of the comparison,
9654 we are committed to doing the store flag, since it is not safe to
9655 re-evaluate the expression. We emit the store-flag insn by calling
9656 emit_store_flag, but only expand the arguments if we have a reason
9657 to believe that emit_store_flag will be successful. If we think that
9658 it will, but it isn't, we have to simulate the store-flag with a
9659 set/jump/set sequence. */
9662 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9665 tree arg0, arg1, type;
9667 enum machine_mode operand_mode;
9671 enum insn_code icode;
9672 rtx subtarget = target;
9675 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9676 result at the end. We can't simply invert the test since it would
9677 have already been inverted if it were valid. This case occurs for
9678 some floating-point comparisons. */
9680 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9681 invert = 1, exp = TREE_OPERAND (exp, 0);
9683 arg0 = TREE_OPERAND (exp, 0);
9684 arg1 = TREE_OPERAND (exp, 1);
9686 /* Don't crash if the comparison was erroneous. */
9687 if (arg0 == error_mark_node || arg1 == error_mark_node)
9690 type = TREE_TYPE (arg0);
9691 operand_mode = TYPE_MODE (type);
9692 unsignedp = TYPE_UNSIGNED (type);
9694 /* We won't bother with BLKmode store-flag operations because it would mean
9695 passing a lot of information to emit_store_flag. */
9696 if (operand_mode == BLKmode)
9699 /* We won't bother with store-flag operations involving function pointers
9700 when function pointers must be canonicalized before comparisons. */
9701 #ifdef HAVE_canonicalize_funcptr_for_compare
9702 if (HAVE_canonicalize_funcptr_for_compare
9703 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9704 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9706 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9707 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9708 == FUNCTION_TYPE))))
9715 /* Get the rtx comparison code to use. We know that EXP is a comparison
9716 operation of some type. Some comparisons against 1 and -1 can be
9717 converted to comparisons with zero. Do so here so that the tests
9718 below will be aware that we have a comparison with zero. These
9719 tests will not catch constants in the first operand, but constants
9720 are rarely passed as the first operand. */
9722 switch (TREE_CODE (exp))
9731 if (integer_onep (arg1))
9732 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9734 code = unsignedp ? LTU : LT;
9737 if (! unsignedp && integer_all_onesp (arg1))
9738 arg1 = integer_zero_node, code = LT;
9740 code = unsignedp ? LEU : LE;
9743 if (! unsignedp && integer_all_onesp (arg1))
9744 arg1 = integer_zero_node, code = GE;
9746 code = unsignedp ? GTU : GT;
9749 if (integer_onep (arg1))
9750 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9752 code = unsignedp ? GEU : GE;
9755 case UNORDERED_EXPR:
9784 /* Put a constant second. */
9785 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
9786 || TREE_CODE (arg0) == FIXED_CST)
9788 tem = arg0; arg0 = arg1; arg1 = tem;
9789 code = swap_condition (code);
9792 /* If this is an equality or inequality test of a single bit, we can
9793 do this by shifting the bit being tested to the low-order bit and
9794 masking the result with the constant 1. If the condition was EQ,
9795 we xor it with 1. This does not require an scc insn and is faster
9796 than an scc insn even if we have it.
9798 The code to make this transformation was moved into fold_single_bit_test,
9799 so we just call into the folder and expand its result. */
9801 if ((code == NE || code == EQ)
9802 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9803 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9805 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9806 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
arg0, arg1, type),
9808 target, VOIDmode, EXPAND_NORMAL);
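/* Hypothetical example of the transformation delegated to
   fold_single_bit_test above:

       (x & 8) != 0   ==>   (x >> 3) & 1

   and for the EQ case the result is additionally XORed with 1; no scc
   instruction is needed either way.  */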
9811 /* Now see if we are likely to be able to do this. Return if not. */
9812 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9815 icode = setcc_gen_code[(int) code];
9817 if (icode == CODE_FOR_nothing)
9819 enum machine_mode wmode;
9821 for (wmode = operand_mode;
9822 icode == CODE_FOR_nothing && wmode != VOIDmode;
9823 wmode = GET_MODE_WIDER_MODE (wmode))
9824 icode = optab_handler (cstore_optab, wmode)->insn_code;
9827 if (icode == CODE_FOR_nothing
9828 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9830 /* We can only do this if it is one of the special cases that
9831 can be handled without an scc insn. */
9832 if ((code == LT && integer_zerop (arg1))
9833 || (! only_cheap && code == GE && integer_zerop (arg1)))
9835 else if (! only_cheap && (code == NE || code == EQ)
9836 && TREE_CODE (type) != REAL_TYPE
9837 && ((optab_handler (abs_optab, operand_mode)->insn_code
9838 != CODE_FOR_nothing)
9839 || (optab_handler (ffs_optab, operand_mode)->insn_code
9840 != CODE_FOR_nothing)))
9846 if (! get_subtarget (target)
9847 || GET_MODE (subtarget) != operand_mode)
subtarget = 0;
9850 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9853 target = gen_reg_rtx (mode);
9855 result = emit_store_flag (target, code, op0, op1,
9856 operand_mode, unsignedp, 1);
9861 result = expand_binop (mode, xor_optab, result, const1_rtx,
9862 result, 0, OPTAB_LIB_WIDEN);
9866 /* If this failed, we have to do this with set/compare/jump/set code. */
9868 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9869 target = gen_reg_rtx (GET_MODE (target));
9871 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9872 label = gen_label_rtx ();
9873 do_compare_rtx_and_jump (op0, op1, code, unsignedp, operand_mode, NULL_RTX,
9876 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9883 /* Stubs in case we haven't got a casesi insn. */
9885 # define HAVE_casesi 0
9886 # define gen_casesi(a, b, c, d, e) (0)
9887 # define CODE_FOR_casesi CODE_FOR_nothing
9890 /* If the machine does not have a case insn that compares the bounds,
9891 this means extra overhead for dispatch tables, which raises the
9892 threshold for using them. */
9893 #ifndef CASE_VALUES_THRESHOLD
9894 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9895 #endif /* CASE_VALUES_THRESHOLD */
9898 case_values_threshold (void)
9900 return CASE_VALUES_THRESHOLD;
9903 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9904 0 otherwise (i.e. if there is no casesi instruction). */
9906 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9907 rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
9908 rtx fallback_label ATTRIBUTE_UNUSED)
9910 enum machine_mode index_mode = SImode;
9911 int index_bits = GET_MODE_BITSIZE (index_mode);
9912 rtx op1, op2, index;
9913 enum machine_mode op_mode;
9918 /* Convert the index to SImode. */
9919 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9921 enum machine_mode omode = TYPE_MODE (index_type);
9922 rtx rangertx = expand_normal (range);
9924 /* We must handle the endpoints in the original mode. */
9925 index_expr = build2 (MINUS_EXPR, index_type,
9926 index_expr, minval);
9927 minval = integer_zero_node;
9928 index = expand_normal (index_expr);
9930 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9931 omode, 1, default_label);
9932 /* Now we can safely truncate. */
9933 index = convert_to_mode (index_mode, index, 0);
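/* Hypothetical illustration of the endpoint handling above: if the
   index type is wider than SImode, the subtraction of MINVAL and the
   bounds check against RANGE are performed in the original wide mode
   first; only after all out-of-range values have branched to
   DEFAULT_LABEL is the index narrowed, so the truncation cannot
   discard significant bits.  */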
9937 if (TYPE_MODE (index_type) != index_mode)
9939 index_type = lang_hooks.types.type_for_size (index_bits, 0);
9940 index_expr = fold_convert (index_type, index_expr);
9943 index = expand_normal (index_expr);
9946 do_pending_stack_adjust ();
9948 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9949 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9951 index = copy_to_mode_reg (op_mode, index);
9953 op1 = expand_normal (minval);
9955 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9956 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9957 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9958 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9960 op1 = copy_to_mode_reg (op_mode, op1);
9962 op2 = expand_normal (range);
9964 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9965 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9966 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9967 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9969 op2 = copy_to_mode_reg (op_mode, op2);
9971 emit_jump_insn (gen_casesi (index, op1, op2,
9972 table_label, !default_label
9973 ? fallback_label : default_label));
9977 /* Attempt to generate a tablejump instruction; same concept. */
9978 #ifndef HAVE_tablejump
9979 #define HAVE_tablejump 0
9980 #define gen_tablejump(x, y) (0)
9983 /* Subroutine of the next function.
9985 INDEX is the value being switched on, with the lowest value
9986 in the table already subtracted.
9987 MODE is its expected mode (needed if INDEX is constant).
9988 RANGE is the length of the jump table.
9989 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9991 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9992 index value is out of range. */
9995 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10000 if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10001 cfun->cfg->max_jumptable_ents = INTVAL (range);
10003 /* Do an unsigned comparison (in the proper mode) between the index
10004 expression and the value which represents the length of the range.
10005 Since we just finished subtracting the lower bound of the range
10006 from the index expression, this comparison allows us to simultaneously
10007 check that the original index expression value is both greater than
10008 or equal to the minimum value of the range and less than or equal to
10009 the maximum value of the range. */
10012 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
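/* Worked example with hypothetical bounds: for case values 5 .. 12 the
   lower bound 5 has already been subtracted, so with RANGE = 7 the
   single unsigned test

       (unsigned) (i - 5) > 7

   rejects both i < 5 (which wraps to a huge unsigned value) and i > 12
   with one branch.  */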
10015 /* If index is in range, it must fit in Pmode.
10016 Convert to Pmode so we can index with it. */
10018 index = convert_to_mode (Pmode, index, 1);
10020 /* Don't let a MEM slip through, because then INDEX that comes
10021 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10022 and break_out_memory_refs will go to work on it and mess it up. */
10023 #ifdef PIC_CASE_VECTOR_ADDRESS
10024 if (flag_pic && !REG_P (index))
10025 index = copy_to_mode_reg (Pmode, index);
10028 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10029 GET_MODE_SIZE, because this indicates how large insns are. The other
10030 uses should all be Pmode, because they are addresses. This code
10031 could fail if addresses and insns are not the same size. */
10032 index = gen_rtx_PLUS (Pmode,
10033 gen_rtx_MULT (Pmode, index,
10034 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10035 gen_rtx_LABEL_REF (Pmode, table_label));
10036 #ifdef PIC_CASE_VECTOR_ADDRESS
10038 index = PIC_CASE_VECTOR_ADDRESS (index);
10041 index = memory_address (CASE_VECTOR_MODE, index);
10042 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10043 vector = gen_const_mem (CASE_VECTOR_MODE, index);
10044 convert_move (temp, vector, 0);
10046 emit_jump_insn (gen_tablejump (temp, table_label));
10048 /* If we are generating PIC code or if the table is PC-relative, the
10049 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10050 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10055 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10056 rtx table_label, rtx default_label)
10060 if (! HAVE_tablejump)
10063 index_expr = fold_build2 (MINUS_EXPR, index_type,
10064 fold_convert (index_type, index_expr),
10065 fold_convert (index_type, minval));
10066 index = expand_normal (index_expr);
10067 do_pending_stack_adjust ();
10069 do_tablejump (index, TYPE_MODE (index_type),
10070 convert_modes (TYPE_MODE (index_type),
10071 TYPE_MODE (TREE_TYPE (range)),
10072 expand_normal (range),
10073 TYPE_UNSIGNED (TREE_TYPE (range))),
10074 table_label, default_label);
10078 /* Nonzero if the mode is a valid vector mode for this architecture.
10079 This returns nonzero even if there is no hardware support for the
10080 vector mode, but we can emulate with narrower modes. */
10083 vector_mode_valid_p (enum machine_mode mode)
10085 enum mode_class mclass = GET_MODE_CLASS (mode);
10086 enum machine_mode innermode;
10088 /* Doh! What's going on? */
10089 if (mclass != MODE_VECTOR_INT
10090 && mclass != MODE_VECTOR_FLOAT
10091 && mclass != MODE_VECTOR_FRACT
10092 && mclass != MODE_VECTOR_UFRACT
10093 && mclass != MODE_VECTOR_ACCUM
10094 && mclass != MODE_VECTOR_UACCUM)
10097 /* Hardware support. Woo hoo! */
10098 if (targetm.vector_mode_supported_p (mode))
10101 innermode = GET_MODE_INNER (mode);
10103 /* We should probably return 1 if requesting V4DI and we have no DI
10104 but do have V2DI; that case, however, is probably very unlikely.  */
10106 /* If we have support for the inner mode, we can safely emulate it.
10107 We may not have V2DI, but we can emulate it with a pair of DIs. */
10108 return targetm.scalar_mode_supported_p (innermode);
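/* Hypothetical example: a target without a V2DI pattern can still
   report V2DI as valid here as long as DImode itself is supported,
   since each V2DI operation can be emulated element-wise by a pair of
   DImode operations.  */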
10111 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10113 const_vector_from_tree (tree exp)
10118 enum machine_mode inner, mode;
10120 mode = TYPE_MODE (TREE_TYPE (exp));
10122 if (initializer_zerop (exp))
10123 return CONST0_RTX (mode);
10125 units = GET_MODE_NUNITS (mode);
10126 inner = GET_MODE_INNER (mode);
10128 v = rtvec_alloc (units);
10130 link = TREE_VECTOR_CST_ELTS (exp);
10131 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10133 elt = TREE_VALUE (link);
10135 if (TREE_CODE (elt) == REAL_CST)
10136 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10138 else if (TREE_CODE (elt) == FIXED_CST)
10139 RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10142 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10143 TREE_INT_CST_HIGH (elt),
10147 /* Initialize remaining elements to 0. */
10148 for (; i < units; ++i)
10149 RTVEC_ELT (v, i) = CONST0_RTX (inner);
10151 return gen_rtx_CONST_VECTOR (mode, v);
10153 #include "gt-expr.h"