/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;
  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }
  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
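
/* Illustrative sketch (not part of this file): the intended calling
   protocol for the queue machinery above.  VAR, INC_BODY and TARGET
   are hypothetical rtxes.  */
#if 0
  rtx q = enqueue_insn (var, inc_body);	/* Queue the increment of VAR.  */
  rtx safe = protect_from_queue (q, 0);	/* Safe to put in an insn now.  */
  emit_insn (gen_move_insn (target, safe));
  emit_queue ();			/* Finally emit the queued increment.  */
#endif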
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;
      else
	abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
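
/* Illustrative sketch (not part of this file): zero-extending a QImode
   value to SImode with convert_to_mode; BYTE_REG is a hypothetical
   QImode pseudo.  */
#if 0
  rtx byte_reg = gen_reg_rtx (QImode);
  rtx wide = convert_to_mode (SImode, byte_reg, 1);  /* 1 = treat as unsigned.  */
#endif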
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
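
/* Illustrative sketch (not part of this file): callers typically gate a
   by-pieces copy on this predicate, mirroring the CONST_INT test made
   in emit_block_move below.  */
#if 0
  if (GET_CODE (size) == CONST_INT
      && can_move_by_pieces (INTVAL (size), align))
    move_by_pieces (to, from, INTVAL (size), align, 0);
#endif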
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }

  return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
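
/* Worked example (illustrative): with 4-byte words, sufficient
   alignment and l = 11, the loop above counts 2 SImode moves (8
   bytes), 1 HImode move (2 bytes) and 1 QImode move, returning 4;
   MOVE_BY_PIECES_P then compares that count against MOVE_RATIO.  */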
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
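
/* Illustrative sketch (not part of this file): a typical call, where
   DST and SRC are hypothetical BLKmode MEMs.  A CONST_INT size lets
   the by-pieces path fire when it is profitable.  */
#if 0
  emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);
#endif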
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    (void) fn;
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
#endif
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
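
/* Illustrative sketch (not part of this file): copy the first two
   words of a hypothetical BLKmode value X into consecutive hard
   registers starting at register 3.  */
#if 0
  move_block_to_reg (3, x, 2, BLKmode);
#endif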
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && GET_CODE (src) == REG)
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
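
/* Illustrative sketch (not part of this file): loading a 16-byte block
   MEM into two hypothetical DImode hard registers (10 and 11) via the
   PARALLEL of (register, byte-offset) pairs this function expects.  */
#if 0
  rtx par = gen_rtx_PARALLEL (BLKmode,
	      gen_rtvec (2,
			 gen_rtx_EXPR_LIST (VOIDmode,
					    gen_rtx_REG (DImode, 10),
					    GEN_INT (0)),
			 gen_rtx_EXPR_LIST (VOIDmode,
					    gen_rtx_REG (DImode, 11),
					    GEN_INT (8))));
  emit_group_load (par, mem, type, 16);
#endif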
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else if (bytepos == 0 && XVECLEN (src, 0))
	    {
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
2097 /* Generate code to copy a BLKmode object of TYPE out of a
2098 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2099 is null, a stack temporary is created. TGTBLK is returned.
2101 The purpose of this routine is to handle functions that return
2102 BLKmode structures in registers. Some machines (the PA for example)
2103 want to return all small structures in registers regardless of the
2104 structure's alignment. */
2107 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2109 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2110 rtx src = NULL, dst = NULL;
2111 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2112 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2116 tgtblk = assign_temp (build_qualified_type (type,
2118 | TYPE_QUAL_CONST)),
2120 preserve_temp_slots (tgtblk);
2123 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2124 into a new pseudo which is a full word. */
2126 if (GET_MODE (srcreg) != BLKmode
2127 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2128 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2130 /* If the structure doesn't take up a whole number of words, see whether
2131 SRCREG is padded on the left or on the right. If it's on the left,
2132 set PADDING_CORRECTION to the number of bits to skip.
2134 In most ABIs, the structure will be returned at the least significant end of
2135 the register, which translates to right padding on little-endian
2136 targets and left padding on big-endian targets. The opposite
2137 holds if the structure is returned at the most significant
2138 end of the register. */
2139 if (bytes % UNITS_PER_WORD != 0
2140 && (targetm.calls.return_in_msb (type)
2142 : BYTES_BIG_ENDIAN))
2144 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2146 /* Copy the structure BITSIZE bits at a time.
2148 We could probably emit more efficient code for machines which do not use
2149 strict alignment, but it doesn't seem worth the effort at the current
time.  */
2151 for (bitpos = 0, xbitpos = padding_correction;
2152 bitpos < bytes * BITS_PER_UNIT;
2153 bitpos += bitsize, xbitpos += bitsize)
2155 /* We need a new source operand each time xbitpos is on a
2156 word boundary and when xbitpos == padding_correction
2157 (the first time through). */
2158 if (xbitpos % BITS_PER_WORD == 0
2159 || xbitpos == padding_correction)
2160 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2163 /* We need a new destination operand each time bitpos is on
a word boundary.  */
2165 if (bitpos % BITS_PER_WORD == 0)
2166 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2168 /* Use xbitpos for the source extraction (right justified) and
2169 bitpos for the destination store (left justified).  */
2170 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2171 extract_bit_field (src, bitsize,
2172 xbitpos % BITS_PER_WORD, 1,
2173 NULL_RTX, word_mode, word_mode,
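/* Illustrative example (not part of the original source): a hypothetical
   caller handling a BLKmode return value could write

     rtx blk = copy_blkmode_from_reg (NULL_RTX, hard_return_reg, type);

   where hard_return_reg and type stand in for the caller's values; a
   null TGTBLK asks this routine to allocate a stack temporary and
   return it.  */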
2181 /* Add a USE expression for REG to the (possibly empty) list pointed
2182 to by CALL_FUSAGE. REG must denote a hard register. */
2185 use_reg (rtx *call_fusage, rtx reg)
2187 if (GET_CODE (reg) != REG
2188 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2192 = gen_rtx_EXPR_LIST (VOIDmode,
2193 gen_rtx_USE (VOIDmode, reg), *call_fusage);
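/* Illustrative example (not part of the original source): a caller
   building the function-usage list for a call might start from an
   empty list and record one hard argument register, e.g.

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (word_mode, 0));

   where register number 0 stands in for a target-specific hard
   argument register.  */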
2196 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2197 starting at REGNO. All of these registers must be hard registers. */
2200 use_regs (rtx *call_fusage, int regno, int nregs)
2204 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2207 for (i = 0; i < nregs; i++)
2208 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2211 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2212 PARALLEL REGS. This is for calls that pass values in multiple
2213 non-contiguous locations. The Irix 6 ABI has examples of this. */
2216 use_group_regs (rtx *call_fusage, rtx regs)
2220 for (i = 0; i < XVECLEN (regs, 0); i++)
2222 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2224 /* A NULL entry means the parameter goes both on the stack and in
2225 registers. This can also be a MEM for targets that pass values
2226 partially on the stack and partially in registers. */
2227 if (reg != 0 && GET_CODE (reg) == REG)
2228 use_reg (call_fusage, reg);
2233 /* Determine whether the LEN bytes generated by CONSTFUN can be
2234 stored to memory using several move instructions. CONSTFUNDATA is
2235 a pointer which will be passed as an argument in every CONSTFUN call.
2236 ALIGN is the maximum alignment we can assume.  Return nonzero if a
2237 call to store_by_pieces should succeed. */
2240 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2241 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2242 void *constfundata, unsigned int align)
2244 unsigned HOST_WIDE_INT max_size, l;
2245 HOST_WIDE_INT offset = 0;
2246 enum machine_mode mode, tmode;
2247 enum insn_code icode;
2254 if (! STORE_BY_PIECES_P (len, align))
2257 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2258 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2259 align = MOVE_MAX * BITS_PER_UNIT;
2261 /* We would first store what we can in the largest integer mode, then go to
2262 successively smaller modes. */
2265 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2270 max_size = STORE_MAX_PIECES + 1;
2271 while (max_size > 1)
2273 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2274 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2275 if (GET_MODE_SIZE (tmode) < max_size)
2278 if (mode == VOIDmode)
2281 icode = mov_optab->handlers[(int) mode].insn_code;
2282 if (icode != CODE_FOR_nothing
2283 && align >= GET_MODE_ALIGNMENT (mode))
2285 unsigned int size = GET_MODE_SIZE (mode);
2292 cst = (*constfun) (constfundata, offset, mode);
2293 if (!LEGITIMATE_CONSTANT_P (cst))
2303 max_size = GET_MODE_SIZE (mode);
2306 /* The code above should have handled everything. */
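/* Illustrative example (not part of the original source): a minimal
   hypothetical CONSTFUN describing an all-zero block, used to ask
   whether 32 zero bytes could be stored piecewise at word alignment:

     static rtx
     zero_constfun (void *data ATTRIBUTE_UNUSED,
                    HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                    enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     ...
     if (can_store_by_pieces (32, zero_constfun, NULL, BITS_PER_WORD))
       ...

   clear_by_pieces_1 below plays exactly this role for clear_by_pieces.  */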
2314 /* Generate several move instructions to store LEN bytes generated by
2315 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2316 pointer which will be passed as an argument in every CONSTFUN call.
2317 ALIGN is the maximum alignment we can assume.
2318 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2319 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
stpcpy.  */
2323 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2324 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2325 void *constfundata, unsigned int align, int endp)
2327 struct store_by_pieces data;
2336 if (! STORE_BY_PIECES_P (len, align))
2338 to = protect_from_queue (to, 1);
2339 data.constfun = constfun;
2340 data.constfundata = constfundata;
2343 store_by_pieces_1 (&data, align);
2354 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2355 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2357 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2360 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2367 to1 = adjust_address (data.to, QImode, data.offset);
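/* Illustrative example (not part of the original source): with the
   hypothetical zero_constfun sketched above, a caller could emit 32
   zero bytes and obtain the address just past them, mempcpy-style:

     rtx end = store_by_pieces (to, 32, zero_constfun, NULL,
                                BITS_PER_WORD, 1);

   Passing ENDP == 0 would return TO itself instead.  */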
2375 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2376 rtx with BLKmode). The caller must pass TO through protect_from_queue
2377 before calling.  ALIGN is the maximum alignment we can assume.  */
2380 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2382 struct store_by_pieces data;
2387 data.constfun = clear_by_pieces_1;
2388 data.constfundata = NULL;
2391 store_by_pieces_1 (&data, align);
2394 /* Callback routine for clear_by_pieces.
2395 Return const0_rtx unconditionally. */
2398 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2399 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2400 enum machine_mode mode ATTRIBUTE_UNUSED)
2405 /* Subroutine of clear_by_pieces and store_by_pieces.
2406 Generate several move instructions to store LEN bytes of block TO. (A MEM
2407 rtx with BLKmode). The caller must pass TO through protect_from_queue
2408 before calling.  ALIGN is the maximum alignment we can assume.  */
2411 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2412 unsigned int align ATTRIBUTE_UNUSED)
2414 rtx to_addr = XEXP (data->to, 0);
2415 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2416 enum machine_mode mode = VOIDmode, tmode;
2417 enum insn_code icode;
2420 data->to_addr = to_addr;
2422 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2423 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2425 data->explicit_inc_to = 0;
2427 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2429 data->offset = data->len;
2431 /* If storing requires more than two move insns,
2432 copy addresses to registers (to make displacements shorter)
2433 and use post-increment if available. */
2434 if (!data->autinc_to
2435 && move_by_pieces_ninsns (data->len, align) > 2)
2437 /* Determine the main mode we'll be using. */
2438 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2439 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2440 if (GET_MODE_SIZE (tmode) < max_size)
2443 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2445 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2446 data->autinc_to = 1;
2447 data->explicit_inc_to = -1;
2450 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2451 && ! data->autinc_to)
2453 data->to_addr = copy_addr_to_reg (to_addr);
2454 data->autinc_to = 1;
2455 data->explicit_inc_to = 1;
2458 if ( !data->autinc_to && CONSTANT_P (to_addr))
2459 data->to_addr = copy_addr_to_reg (to_addr);
2462 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2463 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2464 align = MOVE_MAX * BITS_PER_UNIT;
2466 /* First store what we can in the largest integer mode, then go to
2467 successively smaller modes. */
2469 while (max_size > 1)
2471 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2472 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2473 if (GET_MODE_SIZE (tmode) < max_size)
2476 if (mode == VOIDmode)
2479 icode = mov_optab->handlers[(int) mode].insn_code;
2480 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2481 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2483 max_size = GET_MODE_SIZE (mode);
2486 /* The code above should have handled everything. */
2491 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2492 with move instructions for mode MODE. GENFUN is the gen_... function
2493 to make a move insn for that mode. DATA has all the other info. */
2496 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2497 struct store_by_pieces *data)
2499 unsigned int size = GET_MODE_SIZE (mode);
2502 while (data->len >= size)
2505 data->offset -= size;
2507 if (data->autinc_to)
2508 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2511 to1 = adjust_address (data->to, mode, data->offset);
2513 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2514 emit_insn (gen_add2_insn (data->to_addr,
2515 GEN_INT (-(HOST_WIDE_INT) size)));
2517 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2518 emit_insn ((*genfun) (to1, cst));
2520 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2521 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2523 if (! data->reverse)
2524 data->offset += size;
2530 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2531 its length in bytes. */
2534 clear_storage (rtx object, rtx size)
2537 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2538 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2540 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2541 just move a zero. Otherwise, do this a piece at a time. */
2542 if (GET_MODE (object) != BLKmode
2543 && GET_CODE (size) == CONST_INT
2544 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2545 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2548 object = protect_from_queue (object, 1);
2549 size = protect_from_queue (size, 0);
2551 if (size == const0_rtx)
2553 else if (GET_CODE (size) == CONST_INT
2554 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2555 clear_by_pieces (object, INTVAL (size), align);
2556 else if (clear_storage_via_clrstr (object, size, align))
2559 retval = clear_storage_via_libcall (object, size);
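/* Illustrative example (not part of the original source): zeroing a
   16-byte BLKmode object OBJ (a MEM rtx) reduces to

     clear_storage (obj, GEN_INT (16));

   and the strategy -- piecewise stores, a clrstr pattern, or a library
   call -- is chosen by the cascade above.  */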
2565 /* A subroutine of clear_storage. Expand a clrstr pattern;
2566 return true if successful. */
2569 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2571 /* Try the most limited insn first, because there's no point
2572 including more than one in the machine description unless
2573 the more limited one has some advantage. */
2575 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2576 enum machine_mode mode;
2578 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2579 mode = GET_MODE_WIDER_MODE (mode))
2581 enum insn_code code = clrstr_optab[(int) mode];
2582 insn_operand_predicate_fn pred;
2584 if (code != CODE_FOR_nothing
2585 /* We don't need MODE to be narrower than
2586 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2587 the mode mask, as it is returned by the macro, it will
2588 definitely be less than the actual mode mask. */
2589 && ((GET_CODE (size) == CONST_INT
2590 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2591 <= (GET_MODE_MASK (mode) >> 1)))
2592 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2593 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2594 || (*pred) (object, BLKmode))
2595 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2596 || (*pred) (opalign, VOIDmode)))
2599 rtx last = get_last_insn ();
2602 op1 = convert_to_mode (mode, size, 1);
2603 pred = insn_data[(int) code].operand[1].predicate;
2604 if (pred != 0 && ! (*pred) (op1, mode))
2605 op1 = copy_to_mode_reg (mode, op1);
2607 pat = GEN_FCN ((int) code) (object, op1, opalign);
2614 delete_insns_since (last);
2621 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2622 Return the return value of memset, 0 otherwise. */
2625 clear_storage_via_libcall (rtx object, rtx size)
2627 tree call_expr, arg_list, fn, object_tree, size_tree;
2628 enum machine_mode size_mode;
2631 /* OBJECT or SIZE may have been passed through protect_from_queue.
2633 It is unsafe to save the value generated by protect_from_queue
2634 and reuse it later. Consider what happens if emit_queue is
2635 called before the return value from protect_from_queue is used.
2637 Expansion of the CALL_EXPR below will call emit_queue before
2638 we are finished emitting RTL for argument setup. So if we are
2639 not careful we could get the wrong value for an argument.
2641 To avoid this problem we go ahead and emit code to copy OBJECT
2642 and SIZE into new pseudos. We can then place those new pseudos
2643 into an RTL_EXPR and use them later, even after a call to
2646 Note this is not strictly needed for library calls since they
2647 do not call emit_queue before loading their arguments. However,
2648 we may need to have library calls call emit_queue in the future
2649 since failing to do so could cause problems for targets which
2650 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2652 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2654 if (TARGET_MEM_FUNCTIONS)
2655 size_mode = TYPE_MODE (sizetype);
2657 size_mode = TYPE_MODE (unsigned_type_node);
2658 size = convert_to_mode (size_mode, size, 1);
2659 size = copy_to_mode_reg (size_mode, size);
2661 /* It is incorrect to use the libcall calling conventions to call
2662 memset in this context. This could be a user call to memset and
2663 the user may wish to examine the return value from memset. For
2664 targets where libcalls and normal calls have different conventions
2665 for returning pointers, we could end up generating incorrect code.
2667 For convenience, we generate the call to bzero this way as well. */
2669 object_tree = make_tree (ptr_type_node, object);
2670 if (TARGET_MEM_FUNCTIONS)
2671 size_tree = make_tree (sizetype, size);
2673 size_tree = make_tree (unsigned_type_node, size);
2675 fn = clear_storage_libcall_fn (true);
2676 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2677 if (TARGET_MEM_FUNCTIONS)
2678 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2679 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2681 /* Now we have to build up the CALL_EXPR itself. */
2682 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2683 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2684 call_expr, arg_list, NULL_TREE);
2686 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2688 /* If we are initializing a readonly value, show the above call
2689 clobbered it. Otherwise, a load from it may erroneously be
2690 hoisted from a loop. */
2691 if (RTX_UNCHANGING_P (object))
2692 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2694 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
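/* Illustrative note (not part of the original source): the CALL_EXPR
   built above corresponds to the C source

     memset (object, 0, size)     when TARGET_MEM_FUNCTIONS
     bzero (object, size)         otherwise

   which is why a meaningful return value exists only in the memset
   case.  */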
2697 /* A subroutine of clear_storage_via_libcall. Create the tree node
2698 for the function we use for block clears. The first time FOR_CALL
2699 is true, we call assemble_external. */
2701 static GTY(()) tree block_clear_fn;
2704 init_block_clear_fn (const char *asmspec)
2706 if (!block_clear_fn)
2710 if (TARGET_MEM_FUNCTIONS)
2712 fn = get_identifier ("memset");
2713 args = build_function_type_list (ptr_type_node, ptr_type_node,
2714 integer_type_node, sizetype,
2719 fn = get_identifier ("bzero");
2720 args = build_function_type_list (void_type_node, ptr_type_node,
2721 unsigned_type_node, NULL_TREE);
2724 fn = build_decl (FUNCTION_DECL, fn, args);
2725 DECL_EXTERNAL (fn) = 1;
2726 TREE_PUBLIC (fn) = 1;
2727 DECL_ARTIFICIAL (fn) = 1;
2728 TREE_NOTHROW (fn) = 1;
2730 block_clear_fn = fn;
2735 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2736 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2741 clear_storage_libcall_fn (int for_call)
2743 static bool emitted_extern;
2745 if (!block_clear_fn)
2746 init_block_clear_fn (NULL);
2748 if (for_call && !emitted_extern)
2750 emitted_extern = true;
2751 make_decl_rtl (block_clear_fn, NULL);
2752 assemble_external (block_clear_fn);
2755 return block_clear_fn;
2758 /* Generate code to copy Y into X.
2759 Both Y and X must have the same mode, except that
2760 Y can be a constant with VOIDmode.
2761 This mode cannot be BLKmode; use emit_block_move for that.
2763 Return the last instruction emitted. */
2766 emit_move_insn (rtx x, rtx y)
2768 enum machine_mode mode = GET_MODE (x);
2769 rtx y_cst = NULL_RTX;
2772 x = protect_from_queue (x, 1);
2773 y = protect_from_queue (y, 0);
2775 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2778 /* Never force constant_p_rtx to memory. */
2779 if (GET_CODE (y) == CONSTANT_P_RTX)
2781 else if (CONSTANT_P (y))
2784 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2785 && (last_insn = compress_float_constant (x, y)))
2790 if (!LEGITIMATE_CONSTANT_P (y))
2792 y = force_const_mem (mode, y);
2794 /* If the target's cannot_force_const_mem prevented the spill,
2795 assume that the target's move expanders will also take care
2796 of the non-legitimate constant. */
2802 /* If X or Y are memory references, verify that their addresses are valid
for the machine.  */
2804 if (GET_CODE (x) == MEM
2805 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2806 && ! push_operand (x, GET_MODE (x)))
2808 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2809 x = validize_mem (x);
2811 if (GET_CODE (y) == MEM
2812 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2814 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2815 y = validize_mem (y);
2817 if (mode == BLKmode)
2820 last_insn = emit_move_insn_1 (x, y);
2822 if (y_cst && GET_CODE (x) == REG
2823 && (set = single_set (last_insn)) != NULL_RTX
2824 && SET_DEST (set) == x
2825 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2826 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
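/* Illustrative example (not part of the original source): loading an
   integer constant into a fresh pseudo is a typical use,

     rtx reg = gen_reg_rtx (SImode);
     rtx insn = emit_move_insn (reg, GEN_INT (42));

   GEN_INT yields a VOIDmode constant, the one mode mismatch this
   routine permits.  */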
2831 /* Low level part of emit_move_insn.
2832 Called just like emit_move_insn, but assumes X and Y
2833 are basically valid. */
2836 emit_move_insn_1 (rtx x, rtx y)
2838 enum machine_mode mode = GET_MODE (x);
2839 enum machine_mode submode;
2840 enum mode_class class = GET_MODE_CLASS (mode);
2842 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2845 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2847 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2849 /* Expand complex moves by moving real part and imag part, if possible. */
2850 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2851 && BLKmode != (submode = GET_MODE_INNER (mode))
2852 && (mov_optab->handlers[(int) submode].insn_code
2853 != CODE_FOR_nothing))
2855 /* Don't split destination if it is a stack push. */
2856 int stack = push_operand (x, GET_MODE (x));
2858 #ifdef PUSH_ROUNDING
2859 /* In case we output to the stack, but the size is smaller than the
2860 machine can push exactly, we need to use move instructions. */
2862 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2863 != GET_MODE_SIZE (submode)))
2866 HOST_WIDE_INT offset1, offset2;
2868 /* Do not use anti_adjust_stack, since we don't want to update
2869 stack_pointer_delta. */
2870 temp = expand_binop (Pmode,
2871 #ifdef STACK_GROWS_DOWNWARD
2879 (GET_MODE_SIZE (GET_MODE (x)))),
2880 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2882 if (temp != stack_pointer_rtx)
2883 emit_move_insn (stack_pointer_rtx, temp);
2885 #ifdef STACK_GROWS_DOWNWARD
2887 offset2 = GET_MODE_SIZE (submode);
2889 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2890 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2891 + GET_MODE_SIZE (submode));
2894 emit_move_insn (change_address (x, submode,
2895 gen_rtx_PLUS (Pmode,
2897 GEN_INT (offset1))),
2898 gen_realpart (submode, y));
2899 emit_move_insn (change_address (x, submode,
2900 gen_rtx_PLUS (Pmode,
2902 GEN_INT (offset2))),
2903 gen_imagpart (submode, y));
2907 /* If this is a stack push, push the highpart first, so it
2908 will be in the argument order.
2910 In that case, change_address is used only to convert
2911 the mode, not to change the address.  */
2914 /* Note that the real part always precedes the imag part in memory
2915 regardless of the machine's endianness.  */
2916 #ifdef STACK_GROWS_DOWNWARD
2917 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2918 gen_imagpart (submode, y));
2919 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2920 gen_realpart (submode, y));
2922 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2923 gen_realpart (submode, y));
2924 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2925 gen_imagpart (submode, y));
2930 rtx realpart_x, realpart_y;
2931 rtx imagpart_x, imagpart_y;
2933 /* If this is a complex value with each part being smaller than a
2934 word, the usual calling sequence will likely pack the pieces into
2935 a single register. Unfortunately, SUBREG of hard registers only
2936 deals in terms of words, so we have a problem converting input
2937 arguments to the CONCAT of two registers that is used elsewhere
2938 for complex values. If this is before reload, we can copy it into
2939 memory and reload. FIXME, we should see about using extract and
2940 insert on integer registers, but complex short and complex char
2941 variables should be rarely used. */
2942 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2943 && (reload_in_progress | reload_completed) == 0)
2946 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2948 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2950 if (packed_dest_p || packed_src_p)
2952 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2953 ? MODE_FLOAT : MODE_INT);
2955 enum machine_mode reg_mode
2956 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2958 if (reg_mode != BLKmode)
2960 rtx mem = assign_stack_temp (reg_mode,
2961 GET_MODE_SIZE (mode), 0);
2962 rtx cmem = adjust_address (mem, mode, 0);
2965 = N_("function using short complex types cannot be inline");
2969 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2971 emit_move_insn_1 (cmem, y);
2972 return emit_move_insn_1 (sreg, mem);
2976 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2978 emit_move_insn_1 (mem, sreg);
2979 return emit_move_insn_1 (x, cmem);
2985 realpart_x = gen_realpart (submode, x);
2986 realpart_y = gen_realpart (submode, y);
2987 imagpart_x = gen_imagpart (submode, x);
2988 imagpart_y = gen_imagpart (submode, y);
2990 /* Show the output dies here. This is necessary for SUBREGs
2991 of pseudos since we cannot track their lifetimes correctly;
2992 hard regs shouldn't appear here except as return values.
2993 We never want to emit such a clobber after reload. */
2995 && ! (reload_in_progress || reload_completed)
2996 && (GET_CODE (realpart_x) == SUBREG
2997 || GET_CODE (imagpart_x) == SUBREG))
2998 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3000 emit_move_insn (realpart_x, realpart_y);
3001 emit_move_insn (imagpart_x, imagpart_y);
3004 return get_last_insn ();
3007 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3008 find a mode to do it in. If we have a movcc, use it. Otherwise,
3009 find the MODE_INT mode of the same width. */
3010 else if (GET_MODE_CLASS (mode) == MODE_CC
3011 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3013 enum insn_code insn_code;
3014 enum machine_mode tmode = VOIDmode;
3018 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3021 for (tmode = QImode; tmode != VOIDmode;
3022 tmode = GET_MODE_WIDER_MODE (tmode))
3023 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3026 if (tmode == VOIDmode)
3029 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3030 may call change_address which is not appropriate if we were
3031 called when a reload was in progress. We don't have to worry
3032 about changing the address since the size in bytes is supposed to
3033 be the same. Copy the MEM to change the mode and move any
3034 substitutions from the old MEM to the new one. */
3036 if (reload_in_progress)
3038 x = gen_lowpart_common (tmode, x1);
3039 if (x == 0 && GET_CODE (x1) == MEM)
3041 x = adjust_address_nv (x1, tmode, 0);
3042 copy_replacements (x1, x);
3045 y = gen_lowpart_common (tmode, y1);
3046 if (y == 0 && GET_CODE (y1) == MEM)
3048 y = adjust_address_nv (y1, tmode, 0);
3049 copy_replacements (y1, y);
3054 x = gen_lowpart (tmode, x);
3055 y = gen_lowpart (tmode, y);
3058 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3059 return emit_insn (GEN_FCN (insn_code) (x, y));
3062 /* Try using a move pattern for the corresponding integer mode. This is
3063 only safe when simplify_subreg can convert MODE constants into integer
3064 constants. At present, it can only do this reliably if the value
3065 fits within a HOST_WIDE_INT. */
3066 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3067 && (submode = int_mode_for_mode (mode)) != BLKmode
3068 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3069 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3070 (simplify_gen_subreg (submode, x, mode, 0),
3071 simplify_gen_subreg (submode, y, mode, 0)));
3073 /* This will handle any multi-word or full-word mode that lacks a move_insn
3074 pattern. However, you will get better code if you define such patterns,
3075 even if they must turn into multiple assembler instructions. */
3076 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3083 #ifdef PUSH_ROUNDING
3085 /* If X is a push on the stack, do the push now and replace
3086 X with a reference to the stack pointer. */
3087 if (push_operand (x, GET_MODE (x)))
3092 /* Do not use anti_adjust_stack, since we don't want to update
3093 stack_pointer_delta. */
3094 temp = expand_binop (Pmode,
3095 #ifdef STACK_GROWS_DOWNWARD
3103 (GET_MODE_SIZE (GET_MODE (x)))),
3104 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3106 if (temp != stack_pointer_rtx)
3107 emit_move_insn (stack_pointer_rtx, temp);
3109 code = GET_CODE (XEXP (x, 0));
3111 /* Just hope that small offsets off SP are OK. */
3112 if (code == POST_INC)
3113 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3114 GEN_INT (-((HOST_WIDE_INT)
3115 GET_MODE_SIZE (GET_MODE (x)))));
3116 else if (code == POST_DEC)
3117 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3118 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3120 temp = stack_pointer_rtx;
3122 x = change_address (x, VOIDmode, temp);
3126 /* If we are in reload, see if either operand is a MEM whose address
3127 is scheduled for replacement. */
3128 if (reload_in_progress && GET_CODE (x) == MEM
3129 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3130 x = replace_equiv_address_nv (x, inner);
3131 if (reload_in_progress && GET_CODE (y) == MEM
3132 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3133 y = replace_equiv_address_nv (y, inner);
3139 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3142 rtx xpart = operand_subword (x, i, 1, mode);
3143 rtx ypart = operand_subword (y, i, 1, mode);
3145 /* If we can't get a part of Y, put Y into memory if it is a
3146 constant. Otherwise, force it into a register. If we still
3147 can't get a part of Y, abort. */
3148 if (ypart == 0 && CONSTANT_P (y))
3150 y = force_const_mem (mode, y);
3151 ypart = operand_subword (y, i, 1, mode);
3153 else if (ypart == 0)
3154 ypart = operand_subword_force (y, i, mode);
3156 if (xpart == 0 || ypart == 0)
3159 need_clobber |= (GET_CODE (xpart) == SUBREG);
3161 last_insn = emit_move_insn (xpart, ypart);
3167 /* Show the output dies here. This is necessary for SUBREGs
3168 of pseudos since we cannot track their lifetimes correctly;
3169 hard regs shouldn't appear here except as return values.
3170 We never want to emit such a clobber after reload. */
3172 && ! (reload_in_progress || reload_completed)
3173 && need_clobber != 0)
3174 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3184 /* If Y is representable exactly in a narrower mode, and the target can
3185 perform the extension directly from constant or memory, then emit the
3186 move as an extension. */
3189 compress_float_constant (rtx x, rtx y)
3191 enum machine_mode dstmode = GET_MODE (x);
3192 enum machine_mode orig_srcmode = GET_MODE (y);
3193 enum machine_mode srcmode;
3196 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3198 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3199 srcmode != orig_srcmode;
3200 srcmode = GET_MODE_WIDER_MODE (srcmode))
3203 rtx trunc_y, last_insn;
3205 /* Skip if the target can't extend this way. */
3206 ic = can_extend_p (dstmode, srcmode, 0);
3207 if (ic == CODE_FOR_nothing)
3210 /* Skip if the narrowed value isn't exact. */
3211 if (! exact_real_truncate (srcmode, &r))
3214 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3216 if (LEGITIMATE_CONSTANT_P (trunc_y))
3218 /* Skip if the target needs extra instructions to perform
the extension.  */
3220 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3223 else if (float_extend_from_mem[dstmode][srcmode])
3224 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3228 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3229 last_insn = get_last_insn ();
3231 if (GET_CODE (x) == REG)
3232 set_unique_reg_note (last_insn, REG_EQUAL, y);
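/* Illustrative note (not part of the original source): on a target with
   a direct SFmode-to-DFmode extension, a move of the DFmode constant
   1.0 can be emitted roughly as

     (set (reg:DF d) (float_extend:DF (mem:SF <1.0f>)))

   because 1.0 truncates to SFmode exactly, whereas a constant such as
   0.1 fails the exact_real_truncate test and is left alone.  */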
3240 /* Pushing data onto the stack. */
3242 /* Push a block of length SIZE (perhaps variable)
3243 and return an rtx to address the beginning of the block.
3244 Note that it is not possible for the value returned to be a QUEUED.
3245 The value may be virtual_outgoing_args_rtx.
3247 EXTRA is the number of bytes of padding to push in addition to SIZE.
3248 BELOW nonzero means this padding comes at low addresses;
3249 otherwise, the padding comes at high addresses. */
3252 push_block (rtx size, int extra, int below)
3256 size = convert_modes (Pmode, ptr_mode, size, 1);
3257 if (CONSTANT_P (size))
3258 anti_adjust_stack (plus_constant (size, extra));
3259 else if (GET_CODE (size) == REG && extra == 0)
3260 anti_adjust_stack (size);
3263 temp = copy_to_mode_reg (Pmode, size);
3265 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3266 temp, 0, OPTAB_LIB_WIDEN);
3267 anti_adjust_stack (temp);
3270 #ifndef STACK_GROWS_DOWNWARD
3276 temp = virtual_outgoing_args_rtx;
3277 if (extra != 0 && below)
3278 temp = plus_constant (temp, extra);
3282 if (GET_CODE (size) == CONST_INT)
3283 temp = plus_constant (virtual_outgoing_args_rtx,
3284 -INTVAL (size) - (below ? 0 : extra));
3285 else if (extra != 0 && !below)
3286 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3287 negate_rtx (Pmode, plus_constant (size, extra)));
3289 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3290 negate_rtx (Pmode, size));
3293 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
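/* Illustrative example (not part of the original source): on a
   downward-growing stack,

     rtx addr = push_block (GEN_INT (64), 8, 1);

   reserves 64 bytes plus 8 bytes of padding at low addresses and
   returns an address for the beginning of the 64-byte block itself.  */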
3296 #ifdef PUSH_ROUNDING
3298 /* Emit single push insn. */
3301 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3304 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3306 enum insn_code icode;
3307 insn_operand_predicate_fn pred;
3309 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3310 /* If there is a push pattern, use it.  Otherwise try the old way of
3311 throwing a MEM representing the push operation to the move expander.  */
3312 icode = push_optab->handlers[(int) mode].insn_code;
3313 if (icode != CODE_FOR_nothing)
3315 if (((pred = insn_data[(int) icode].operand[0].predicate)
3316 && !((*pred) (x, mode))))
3317 x = force_reg (mode, x);
3318 emit_insn (GEN_FCN (icode) (x));
3321 if (GET_MODE_SIZE (mode) == rounded_size)
3322 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3323 /* If we are to pad downward, adjust the stack pointer first and
3324 then store X into the stack location using an offset. This is
3325 because emit_move_insn does not know how to pad; it does not have
access to type.  */
3327 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3329 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3330 HOST_WIDE_INT offset;
3332 emit_move_insn (stack_pointer_rtx,
3333 expand_binop (Pmode,
3334 #ifdef STACK_GROWS_DOWNWARD
3340 GEN_INT (rounded_size),
3341 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3343 offset = (HOST_WIDE_INT) padding_size;
3344 #ifdef STACK_GROWS_DOWNWARD
3345 if (STACK_PUSH_CODE == POST_DEC)
3346 /* We have already decremented the stack pointer, so get the
previous value.  */
3348 offset += (HOST_WIDE_INT) rounded_size;
3350 if (STACK_PUSH_CODE == POST_INC)
3351 /* We have already incremented the stack pointer, so get the
previous value.  */
3353 offset -= (HOST_WIDE_INT) rounded_size;
3355 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3359 #ifdef STACK_GROWS_DOWNWARD
3360 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3361 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3362 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3364 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3365 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3366 GEN_INT (rounded_size));
3368 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3371 dest = gen_rtx_MEM (mode, dest_addr);
3375 set_mem_attributes (dest, type, 1);
3377 if (flag_optimize_sibling_calls)
3378 /* Function incoming arguments may overlap with sibling call
3379 outgoing arguments and we cannot allow reordering of reads
3380 from function arguments with stores to outgoing arguments
3381 of sibling calls. */
3382 set_mem_alias_set (dest, 0);
3384 emit_move_insn (dest, x);
3388 /* Generate code to push X onto the stack, assuming it has mode MODE and
type TYPE.
3390 MODE is redundant except when X is a CONST_INT (since they don't
carry mode info).
3392 SIZE is an rtx for the size of data to be copied (in bytes),
3393 needed only if X is BLKmode.
3395 ALIGN (in bits) is the maximum alignment we can assume.
3397 If PARTIAL and REG are both nonzero, then copy that many of the first
3398 words of X into registers starting with REG, and push the rest of X.
3399 The amount of space pushed is decreased by PARTIAL words,
3400 rounded *down* to a multiple of PARM_BOUNDARY.
3401 REG must be a hard register in this case.
3402 If REG is zero but PARTIAL is not, take all other actions for an
3403 argument partially in registers, but do not actually load any
registers.
3406 EXTRA is the amount in bytes of extra space to leave next to this arg.
3407 This is ignored if an argument block has already been allocated.
3409 On a machine that lacks real push insns, ARGS_ADDR is the address of
3410 the bottom of the argument block for this call. We use indexing off there
3411 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3412 argument block has not been preallocated.
3414 ARGS_SO_FAR is the size of args previously pushed for this call.
3416 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3417 for arguments passed in registers. If nonzero, it will be the number
3418 of bytes required. */
3421 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3422 unsigned int align, int partial, rtx reg, int extra,
3423 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3427 enum direction stack_direction
3428 #ifdef STACK_GROWS_DOWNWARD
3434 /* Decide where to pad the argument: `downward' for below,
3435 `upward' for above, or `none' for don't pad it.
3436 Default is below for small data on big-endian machines; else above. */
3437 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3439 /* Invert direction if stack is post-decrement.
FIXME: why?  */
3441 if (STACK_PUSH_CODE == POST_DEC)
3442 if (where_pad != none)
3443 where_pad = (where_pad == downward ? upward : downward);
3445 xinner = x = protect_from_queue (x, 0);
3447 if (mode == BLKmode)
3449 /* Copy a block into the stack, entirely or partially. */
3452 int used = partial * UNITS_PER_WORD;
3456 if (reg && GET_CODE (reg) == PARALLEL)
3458 /* Use the size of the elt to compute offset. */
3459 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3460 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3461 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3464 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3471 /* USED is now the # of bytes we need not copy to the stack
3472 because registers will take care of them. */
3475 xinner = adjust_address (xinner, BLKmode, used);
3477 /* If the partial register-part of the arg counts in its stack size,
3478 skip the part of stack space corresponding to the registers.
3479 Otherwise, start copying to the beginning of the stack space,
3480 by setting SKIP to 0. */
3481 skip = (reg_parm_stack_space == 0) ? 0 : used;
3483 #ifdef PUSH_ROUNDING
3484 /* Do it with several push insns if that doesn't take lots of insns
3485 and if there is no difficulty with push insns that skip bytes
3486 on the stack for alignment purposes. */
3489 && GET_CODE (size) == CONST_INT
3491 && MEM_ALIGN (xinner) >= align
3492 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3493 /* Here we avoid the case of a structure whose weak alignment
3494 forces many pushes of a small amount of data,
3495 and such small pushes do rounding that causes trouble. */
3496 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3497 || align >= BIGGEST_ALIGNMENT
3498 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3499 == (align / BITS_PER_UNIT)))
3500 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3502 /* Push padding now if padding above and stack grows down,
3503 or if padding below and stack grows up.
3504 But if space is already allocated, this has already been done.  */
3505 if (extra && args_addr == 0
3506 && where_pad != none && where_pad != stack_direction)
3507 anti_adjust_stack (GEN_INT (extra));
3509 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3512 #endif /* PUSH_ROUNDING */
3516 /* Otherwise make space on the stack and copy the data
3517 to the address of that space. */
3519 /* Deduct words put into registers from the size we must copy. */
3522 if (GET_CODE (size) == CONST_INT)
3523 size = GEN_INT (INTVAL (size) - used);
3525 size = expand_binop (GET_MODE (size), sub_optab, size,
3526 GEN_INT (used), NULL_RTX, 0,
3530 /* Get the address of the stack space.
3531 In this case, we do not deal with EXTRA separately.
3532 A single stack adjust will do. */
3535 temp = push_block (size, extra, where_pad == downward);
3538 else if (GET_CODE (args_so_far) == CONST_INT)
3539 temp = memory_address (BLKmode,
3540 plus_constant (args_addr,
3541 skip + INTVAL (args_so_far)));
3543 temp = memory_address (BLKmode,
3544 plus_constant (gen_rtx_PLUS (Pmode,
3549 if (!ACCUMULATE_OUTGOING_ARGS)
3551 /* If the source is referenced relative to the stack pointer,
3552 copy it to another register to stabilize it. We do not need
3553 to do this if we know that we won't be changing sp. */
3555 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3556 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3557 temp = copy_to_reg (temp);
3560 target = gen_rtx_MEM (BLKmode, temp);
3564 set_mem_attributes (target, type, 1);
3565 /* Function incoming arguments may overlap with sibling call
3566 outgoing arguments and we cannot allow reordering of reads
3567 from function arguments with stores to outgoing arguments
3568 of sibling calls. */
3569 set_mem_alias_set (target, 0);
3572 /* ALIGN may well be stricter than TYPE's alignment, e.g. due to
3573 PARM_BOUNDARY.  Assume the caller isn't lying.  */
3574 set_mem_align (target, align);
3576 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3579 else if (partial > 0)
3581 /* Scalar partly in registers. */
3583 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3586 /* # words of start of argument
3587 that we must make space for but need not store. */
3588 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3589 int args_offset = INTVAL (args_so_far);
3592 /* Push padding now if padding above and stack grows down,
3593 or if padding below and stack grows up.
3594 But if space is already allocated, this has already been done.  */
3595 if (extra && args_addr == 0
3596 && where_pad != none && where_pad != stack_direction)
3597 anti_adjust_stack (GEN_INT (extra));
3599 /* If we make space by pushing it, we might as well push
3600 the real data. Otherwise, we can leave OFFSET nonzero
3601 and leave the space uninitialized. */
3605 /* Now NOT_STACK gets the number of words that we don't need to
3606 allocate on the stack. */
3607 not_stack = partial - offset;
3609 /* If the partial register-part of the arg counts in its stack size,
3610 skip the part of stack space corresponding to the registers.
3611 Otherwise, start copying to the beginning of the stack space,
3612 by setting SKIP to 0. */
3613 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3615 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3616 x = validize_mem (force_const_mem (mode, x));
3618 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3619 SUBREGs of such registers are not allowed. */
3620 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3621 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3622 x = copy_to_reg (x);
3624 /* Loop over all the words allocated on the stack for this arg. */
3625 /* We can do it by words, because any scalar bigger than a word
3626 has a size that is a multiple of a word.  */
3627 #ifndef PUSH_ARGS_REVERSED
3628 for (i = not_stack; i < size; i++)
3630 for (i = size - 1; i >= not_stack; i--)
3632 if (i >= not_stack + offset)
3633 emit_push_insn (operand_subword_force (x, i, mode),
3634 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3636 GEN_INT (args_offset + ((i - not_stack + skip)
3638 reg_parm_stack_space, alignment_pad);
3645 /* Push padding now if padding above and stack grows down,
3646 or if padding below and stack grows up.
3647 But if space is already allocated, this has already been done.  */
3648 if (extra && args_addr == 0
3649 && where_pad != none && where_pad != stack_direction)
3650 anti_adjust_stack (GEN_INT (extra));
3652 #ifdef PUSH_ROUNDING
3653 if (args_addr == 0 && PUSH_ARGS)
3654 emit_single_push_insn (mode, x, type);
3658 if (GET_CODE (args_so_far) == CONST_INT)
3660 = memory_address (mode,
3661 plus_constant (args_addr,
3662 INTVAL (args_so_far)));
3664 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3666 dest = gen_rtx_MEM (mode, addr);
3669 set_mem_attributes (dest, type, 1);
3670 /* Function incoming arguments may overlap with sibling call
3671 outgoing arguments and we cannot allow reordering of reads
3672 from function arguments with stores to outgoing arguments
3673 of sibling calls. */
3674 set_mem_alias_set (dest, 0);
3677 emit_move_insn (dest, x);
3681 /* If part should go in registers, copy that part
3682 into the appropriate registers. Do this now, at the end,
3683 since mem-to-mem copies above may do function calls. */
3684 if (partial > 0 && reg != 0)
3686 /* Handle calls that pass values in multiple non-contiguous locations.
3687 The Irix 6 ABI has examples of this. */
3688 if (GET_CODE (reg) == PARALLEL)
3689 emit_group_load (reg, x, type, -1);
3691 move_block_to_reg (REGNO (reg), x, partial, mode);
3694 if (extra && args_addr == 0 && where_pad == stack_direction)
3695 anti_adjust_stack (GEN_INT (extra));
3697 if (alignment_pad && args_addr == 0)
3698 anti_adjust_stack (alignment_pad);
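/* Illustrative example (not part of the original source): pushing a
   word-sized scalar argument entirely on the stack, with no register
   part and no extra padding, might look like

     emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
                     0, NULL_RTX, 0, args_addr, args_so_far,
                     reg_parm_stack_space, NULL_RTX);

   SIZE may be null here because X is not BLKmode.  */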
3701 /* Return X if X can be used as a subtarget in a sequence of arithmetic
operations.  */
3705 get_subtarget (rtx x)
3708 /* Only registers can be subtargets. */
3709 || GET_CODE (x) != REG
3710 /* If the register is readonly, it can't be set more than once. */
3711 || RTX_UNCHANGING_P (x)
3712 /* Don't use hard regs to avoid extending their life. */
3713 || REGNO (x) < FIRST_PSEUDO_REGISTER
3714 /* Avoid subtargets inside loops,
3715 since they hide some invariant expressions. */
3716 || preserve_subexpressions_p ())
3720 /* Expand an assignment that stores the value of FROM into TO.
3721 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3722 (This may contain a QUEUED rtx;
3723 if the value is constant, this rtx is a constant.)
3724 Otherwise, the returned value is NULL_RTX. */
3727 expand_assignment (tree to, tree from, int want_value)
3732 /* Don't crash if the lhs of the assignment was erroneous. */
3734 if (TREE_CODE (to) == ERROR_MARK)
3736 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3737 return want_value ? result : NULL_RTX;
3740 /* Assignment of a structure component needs special treatment
3741 if the structure component's rtx is not simply a MEM.
3742 Assignment of an array element at a constant index, and assignment of
3743 an array element in an unaligned packed structure field, have the same
problem.  */
3746 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3747 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3748 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3750 enum machine_mode mode1;
3751 HOST_WIDE_INT bitsize, bitpos;
3759 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3760 &unsignedp, &volatilep);
3762 /* If we are going to use store_bit_field and extract_bit_field,
3763 make sure to_rtx will be safe for multiple use. */
3765 if (mode1 == VOIDmode && want_value)
3766 tem = stabilize_reference (tem);
3768 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3772 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3774 if (GET_CODE (to_rtx) != MEM)
3777 #ifdef POINTERS_EXTEND_UNSIGNED
3778 if (GET_MODE (offset_rtx) != Pmode)
3779 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3781 if (GET_MODE (offset_rtx) != ptr_mode)
3782 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3785 /* A constant address in TO_RTX can have VOIDmode; we must not try
3786 to call force_reg for that case. Avoid that case. */
3787 if (GET_CODE (to_rtx) == MEM
3788 && GET_MODE (to_rtx) == BLKmode
3789 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3791 && (bitpos % bitsize) == 0
3792 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3793 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3795 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3799 to_rtx = offset_address (to_rtx, offset_rtx,
3800 highest_pow2_factor_for_target (to,
3804 if (GET_CODE (to_rtx) == MEM)
3806 /* If the field is at offset zero, we could have been given the
3807 DECL_RTX of the parent struct. Don't munge it. */
3808 to_rtx = shallow_copy_rtx (to_rtx);
3810 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3813 /* Deal with volatile and readonly fields. The former is only done
3814 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3815 if (volatilep && GET_CODE (to_rtx) == MEM)
3817 if (to_rtx == orig_to_rtx)
3818 to_rtx = copy_rtx (to_rtx);
3819 MEM_VOLATILE_P (to_rtx) = 1;
3822 if (TREE_CODE (to) == COMPONENT_REF
3823 && TREE_READONLY (TREE_OPERAND (to, 1))
3824 /* We can't assert that a MEM won't be set more than once
3825 if the component is not addressable because another
3826 non-addressable component may be referenced by the same MEM. */
3827 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3829 if (to_rtx == orig_to_rtx)
3830 to_rtx = copy_rtx (to_rtx);
3831 RTX_UNCHANGING_P (to_rtx) = 1;
3834 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3836 if (to_rtx == orig_to_rtx)
3837 to_rtx = copy_rtx (to_rtx);
3838 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3841 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3843 /* Spurious cast for HPUX compiler. */
3844 ? ((enum machine_mode)
3845 TYPE_MODE (TREE_TYPE (to)))
3847 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3849 preserve_temp_slots (result);
3853 /* If the value is meaningful, convert RESULT to the proper mode.
3854 Otherwise, return nothing. */
3855 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3856 TYPE_MODE (TREE_TYPE (from)),
3858 TREE_UNSIGNED (TREE_TYPE (to)))
3862 /* If the rhs is a function call and its value is not an aggregate,
3863 call the function before we start to compute the lhs.
3864 This is needed for correct code for cases such as
3865 val = setjmp (buf) on machines where reference to val
3866 requires loading up part of an address in a separate insn.
3868 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3869 since it might be a promoted variable where the zero- or sign- extension
3870 needs to be done. Handling this in the normal way is safe because no
3871 computation is done before the call. */
3872 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3873 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3874 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3875 && GET_CODE (DECL_RTL (to)) == REG))
3880 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3882 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3884 /* Handle calls that return values in multiple non-contiguous locations.
3885 The Irix 6 ABI has examples of this. */
3886 if (GET_CODE (to_rtx) == PARALLEL)
3887 emit_group_load (to_rtx, value, TREE_TYPE (from),
3888 int_size_in_bytes (TREE_TYPE (from)));
3889 else if (GET_MODE (to_rtx) == BLKmode)
3890 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3893 if (POINTER_TYPE_P (TREE_TYPE (to)))
3894 value = convert_memory_address (GET_MODE (to_rtx), value);
3895 emit_move_insn (to_rtx, value);
3897 preserve_temp_slots (to_rtx);
3900 return want_value ? to_rtx : NULL_RTX;
3903 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3904 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3907 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3909 /* Don't move directly into a return register. */
3910 if (TREE_CODE (to) == RESULT_DECL
3911 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3916 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3918 if (GET_CODE (to_rtx) == PARALLEL)
3919 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3920 int_size_in_bytes (TREE_TYPE (from)));
3922 emit_move_insn (to_rtx, temp);
3924 preserve_temp_slots (to_rtx);
3927 return want_value ? to_rtx : NULL_RTX;
3930 /* In case we are returning the contents of an object which overlaps
3931 the place the value is being stored, use a safe function when copying
3932 a value through a pointer into a structure value return block. */
3933 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3934 && current_function_returns_struct
3935 && !current_function_returns_pcc_struct)
3940 size = expr_size (from);
3941 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3943 if (TARGET_MEM_FUNCTIONS)
3944 emit_library_call (memmove_libfunc, LCT_NORMAL,
3945 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3946 XEXP (from_rtx, 0), Pmode,
3947 convert_to_mode (TYPE_MODE (sizetype),
3948 size, TREE_UNSIGNED (sizetype)),
3949 TYPE_MODE (sizetype));
3951 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3952 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3953 XEXP (to_rtx, 0), Pmode,
3954 convert_to_mode (TYPE_MODE (integer_type_node),
3956 TREE_UNSIGNED (integer_type_node)),
3957 TYPE_MODE (integer_type_node));
3959 preserve_temp_slots (to_rtx);
3962 return want_value ? to_rtx : NULL_RTX;
3965 /* Compute FROM and store the value in the rtx we got. */
3968 result = store_expr (from, to_rtx, want_value);
3969 preserve_temp_slots (result);
3972 return want_value ? result : NULL_RTX;
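/* Illustrative note (not part of the original source): the case cited
   above, `val = setjmp (buf)', reaches this routine as

     expand_assignment (to, from, 0)     with want_value == 0

   where TO is the VAR_DECL for `val' (living in memory) and FROM is the
   CALL_EXPR, so the call is expanded before the lhs is touched.  */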
3975 /* Generate code for computing expression EXP,
3976 and storing the value into TARGET.
3977 TARGET may contain a QUEUED rtx.
3979 If WANT_VALUE & 1 is nonzero, return a copy of the value
3980 not in TARGET, so that we can be sure to use the proper
3981 value in a containing expression even if TARGET has something
3982 else stored in it. If possible, we copy the value through a pseudo
3983 and return that pseudo. Or, if the value is constant, we try to
3984 return the constant. In some cases, we return a pseudo
3985 copied *from* TARGET.
3987 If the mode is BLKmode then we may return TARGET itself.
3988 It turns out that in BLKmode it doesn't cause a problem,
3989 because C has no operators that could combine two different
3990 assignments into the same BLKmode object with different values
3991 with no sequence point.  Will other languages need this to
be fixed?
3994 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3995 to catch quickly any cases where the caller uses the value
3996 and fails to set WANT_VALUE.
3998 If WANT_VALUE & 2 is set, this is a store into a call param on the
3999 stack, and block moves may need to be treated specially. */
4002 store_expr (tree exp, rtx target, int want_value)
4005 rtx alt_rtl = NULL_RTX;
4006 int dont_return_target = 0;
4007 int dont_store_target = 0;
4009 if (VOID_TYPE_P (TREE_TYPE (exp)))
4011 /* C++ can generate ?: expressions with a throw expression in one
4012 branch and an rvalue in the other. Here, we resolve attempts to
4013 store the throw expression's nonexistent result. */
4016 expand_expr (exp, const0_rtx, VOIDmode, 0);
4019 if (TREE_CODE (exp) == COMPOUND_EXPR)
4021 /* Perform first part of compound expression, then assign from second
part.  */
4023 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4024 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4026 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4028 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4030 /* For a conditional expression, get a safe form of the target.  Then
4031 test the condition, doing the appropriate assignment on either
4032 side. This avoids the creation of unnecessary temporaries.
4033 For non-BLKmode, it is more efficient not to do this. */
4035 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4038 target = protect_from_queue (target, 1);
4040 do_pending_stack_adjust ();
4042 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4043 start_cleanup_deferral ();
4044 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4045 end_cleanup_deferral ();
4047 emit_jump_insn (gen_jump (lab2));
4050 start_cleanup_deferral ();
4051 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4052 end_cleanup_deferral ();
4057 return want_value & 1 ? target : NULL_RTX;
4059 else if (queued_subexp_p (target))
4060 /* If target contains a postincrement, let's not risk
4061 using it as the place to generate the rhs. */
4063 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4065 /* Expand EXP into a new pseudo. */
4066 temp = gen_reg_rtx (GET_MODE (target));
4067 temp = expand_expr (exp, temp, GET_MODE (target),
4069 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4072 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4074 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4076 /* If target is volatile, ANSI requires accessing the value
4077 *from* the target, if it is accessed. So make that happen.
4078 In no case return the target itself. */
4079 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4080 dont_return_target = 1;
4082 else if ((want_value & 1) != 0
4083 && GET_CODE (target) == MEM
4084 && ! MEM_VOLATILE_P (target)
4085 && GET_MODE (target) != BLKmode)
4086 /* If target is in memory and caller wants value in a register instead,
4087 arrange that. Pass TARGET as target for expand_expr so that,
4088 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4089 We know expand_expr will not use the target in that case.
4090 Don't do this if TARGET is volatile because we are supposed
4091 to write it and then read it. */
4093 temp = expand_expr (exp, target, GET_MODE (target),
4094 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4095 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4097 /* If TEMP is already in the desired TARGET, only copy it from
4098 memory and don't store it there again. */
4100 || (rtx_equal_p (temp, target)
4101 && ! side_effects_p (temp) && ! side_effects_p (target)))
4102 dont_store_target = 1;
4103 temp = copy_to_reg (temp);
4105 dont_return_target = 1;
4107 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4108 /* If this is a scalar in a register that is stored in a wider mode
4109 than the declared mode, compute the result into its declared mode
4110 and then convert to the wider mode.  Our value is the computed
4111 expression.  */
4113 rtx inner_target = 0;
4115 /* If we don't want a value, we can do the conversion inside EXP,
4116 which will often result in some optimizations. Do the conversion
4117 in two steps: first change the signedness, if needed, then
4118 the extend. But don't do this if the type of EXP is a subtype
4119 of something else since then the conversion might involve
4120 more than just converting modes. */
4121 if ((want_value & 1) == 0
4122 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4123 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4125 if (TREE_UNSIGNED (TREE_TYPE (exp))
4126 != SUBREG_PROMOTED_UNSIGNED_P (target))
4127 exp = convert
4128 (lang_hooks.types.signed_or_unsigned_type
4129 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4131 exp = convert (lang_hooks.types.type_for_mode
4132 (GET_MODE (SUBREG_REG (target)),
4133 SUBREG_PROMOTED_UNSIGNED_P (target)),
4134 exp);
4136 inner_target = SUBREG_REG (target);
4139 temp = expand_expr (exp, inner_target, VOIDmode,
4140 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4142 /* If TEMP is a MEM and we want a result value, make the access
4143 now so it gets done only once. Strictly speaking, this is
4144 only necessary if the MEM is volatile, or if the address
4145 overlaps TARGET. But not performing the load twice also
4146 reduces the amount of rtl we generate and then have to CSE. */
4147 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4148 temp = copy_to_reg (temp);
4150 /* If TEMP is a VOIDmode constant, use convert_modes to make
4151 sure that we properly convert it. */
4152 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4154 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4155 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4156 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4157 GET_MODE (target), temp,
4158 SUBREG_PROMOTED_UNSIGNED_P (target));
4161 convert_move (SUBREG_REG (target), temp,
4162 SUBREG_PROMOTED_UNSIGNED_P (target));
4164 /* If we promoted a constant, change the mode back down to match
4165 target. Otherwise, the caller might get confused by a result whose
4166 mode is larger than expected. */
4168 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4170 if (GET_MODE (temp) != VOIDmode)
4172 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4173 SUBREG_PROMOTED_VAR_P (temp) = 1;
4174 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4175 SUBREG_PROMOTED_UNSIGNED_P (target));
4176 }
4177 else
4178 temp = convert_modes (GET_MODE (target),
4179 GET_MODE (SUBREG_REG (target)),
4180 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4183 return want_value & 1 ? temp : NULL_RTX;
4187 temp = expand_expr_real (exp, target, GET_MODE (target),
4188 (want_value & 2
4189 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4190 &alt_rtl);
4191 /* Return TARGET if it's a specified hardware register.
4192 If TARGET is a volatile mem ref, either return TARGET
4193 or return a reg copied *from* TARGET; ANSI requires this.
4195 Otherwise, if TEMP is not TARGET, return TEMP
4196 if it is constant (for efficiency),
4197 or if we really want the correct value. */
4198 if (!(target && GET_CODE (target) == REG
4199 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4200 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4201 && ! rtx_equal_p (temp, target)
4202 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4203 dont_return_target = 1;
4206 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4207 the same as that of TARGET, adjust the constant. This is needed, for
4208 example, in case it is a CONST_DOUBLE and we want only a word-sized
4209 value.  */
4210 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4211 && TREE_CODE (exp) != ERROR_MARK
4212 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4213 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4214 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4216 /* If value was not generated in the target, store it there.
4217 Convert the value to TARGET's type first if necessary.
4218 If TEMP and TARGET compare equal according to rtx_equal_p, but
4219 one or both of them are volatile memory refs, we have to distinguish
4220 two cases:
4221 - expand_expr has used TARGET.  In this case, we must not generate
4222 another copy.  This can be detected by TARGET being equal according
4223 to == .
4224 - expand_expr has not used TARGET - that means that the source just
4225 happens to have the same RTX form. Since temp will have been created
4226 by expand_expr, it will compare unequal according to == .
4227 We must generate a copy in this case, to reach the correct number
4228 of volatile memory references. */
4230 if ((! rtx_equal_p (temp, target)
4231 || (temp != target && (side_effects_p (temp)
4232 || side_effects_p (target))))
4233 && TREE_CODE (exp) != ERROR_MARK
4234 && ! dont_store_target
4235 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4236 but TARGET is not valid memory reference, TEMP will differ
4237 from TARGET although it is really the same location. */
4238 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4239 /* If there's nothing to copy, don't bother. Don't call expr_size
4240 unless necessary, because some front ends' (C++) expr_size hook
4241 aborts on objects that are not supposed to be bit-copied or
4242 bit-initialized.  */
4243 && expr_size (exp) != const0_rtx)
4246 target = protect_from_queue (target, 1);
4247 temp = protect_from_queue (temp, 0);
4248 if (GET_MODE (temp) != GET_MODE (target)
4249 && GET_MODE (temp) != VOIDmode)
4251 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4252 if (dont_return_target)
4254 /* In this case, we will return TEMP,
4255 so make sure it has the proper mode.
4256 But don't forget to store the value into TARGET. */
4257 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4258 emit_move_insn (target, temp);
4260 else
4261 convert_move (target, temp, unsignedp);
4264 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4266 /* Handle copying a string constant into an array. The string
4267 constant may be shorter than the array. So copy just the string's
4268 actual length, and clear the rest. First get the size of the data
4269 type of the string, which is actually the size of the target. */
4270 rtx size = expr_size (exp);
4272 if (GET_CODE (size) == CONST_INT
4273 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4274 emit_block_move (target, temp, size,
4275 (want_value & 2
4276 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4279 /* Compute the size of the data to copy from the string. */
4280 tree copy_size
4281 = size_binop (MIN_EXPR,
4282 make_tree (sizetype, size),
4283 size_int (TREE_STRING_LENGTH (exp)));
4284 rtx copy_size_rtx
4285 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4286 (want_value & 2
4287 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4290 /* Copy that much. */
4291 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4292 TREE_UNSIGNED (sizetype));
4293 emit_block_move (target, temp, copy_size_rtx,
4294 (want_value & 2
4295 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4297 /* Figure out how much is left in TARGET that we have to clear.
4298 Do all calculations in ptr_mode. */
4299 if (GET_CODE (copy_size_rtx) == CONST_INT)
4301 size = plus_constant (size, -INTVAL (copy_size_rtx));
4302 target = adjust_address (target, BLKmode,
4303 INTVAL (copy_size_rtx));
4307 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4308 copy_size_rtx, NULL_RTX, 0,
4309 OPTAB_LIB_WIDEN);
4311 #ifdef POINTERS_EXTEND_UNSIGNED
4312 if (GET_MODE (copy_size_rtx) != Pmode)
4313 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4314 TREE_UNSIGNED (sizetype));
4315 #endif
4317 target = offset_address (target, copy_size_rtx,
4318 highest_pow2_factor (copy_size));
4319 label = gen_label_rtx ();
4320 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4321 GET_MODE (size), 0, label);
4324 if (size != const0_rtx)
4325 clear_storage (target, size);
4327 if (label)
4328 emit_label (label);
4331 /* Handle calls that return values in multiple non-contiguous locations.
4332 The Irix 6 ABI has examples of this. */
4333 else if (GET_CODE (target) == PARALLEL)
4334 emit_group_load (target, temp, TREE_TYPE (exp),
4335 int_size_in_bytes (TREE_TYPE (exp)));
4336 else if (GET_MODE (temp) == BLKmode)
4337 emit_block_move (target, temp, expr_size (exp),
4338 (want_value & 2
4339 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4342 temp = force_operand (temp, target);
4343 if (temp != target)
4344 emit_move_insn (target, temp);
4348 /* If we don't want a value, return NULL_RTX. */
4349 if ((want_value & 1) == 0)
4350 return NULL_RTX;
4352 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4353 ??? The latter test doesn't seem to make sense. */
4354 else if (dont_return_target && GET_CODE (temp) != MEM)
4355 return temp;
4357 /* Return TARGET itself if it is a hard register. */
4358 else if ((want_value & 1) != 0
4359 && GET_MODE (target) != BLKmode
4360 && ! (GET_CODE (target) == REG
4361 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4362 return copy_to_reg (target);
4364 else
4365 return target;
4366 }
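/* A hypothetical usage sketch of the WANT_VALUE convention above;
   EXP and TARGET stand for values obtained elsewhere.  */
#if 0
  rtx value;

  /* Plain store; NULL_RTX comes back, catching callers that read the
     result but forgot to set the low bit.  */
  store_expr (exp, target, 0);

  /* Store and keep the computed value for further use.  */
  value = store_expr (exp, target, 1);

  /* Store into a call parameter on the stack; block moves are issued
     with BLOCK_OP_CALL_PARM and intermediates avoid TARGET.  */
  store_expr (exp, target, 2);
#endif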
4368 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4371 is_zeros_p (tree exp)
4375 switch (TREE_CODE (exp))
4379 case NON_LVALUE_EXPR:
4380 case VIEW_CONVERT_EXPR:
4381 return is_zeros_p (TREE_OPERAND (exp, 0));
4384 return integer_zerop (exp);
4388 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4391 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4394 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4395 elt = TREE_CHAIN (elt))
4396 if (!is_zeros_p (TREE_VALUE (elt)))
4397 return 0;
4402 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4403 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4404 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4405 if (! is_zeros_p (TREE_VALUE (elt)))
4406 return 0;
4415 /* Return 1 if EXP contains mostly (3/4) zeros. */
4418 mostly_zeros_p (tree exp)
4420 if (TREE_CODE (exp) == CONSTRUCTOR)
4422 int elts = 0, zeros = 0;
4423 tree elt = CONSTRUCTOR_ELTS (exp);
4424 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4426 /* If there are no ranges of true bits, it is all zero. */
4427 return elt == NULL_TREE;
4429 for (; elt; elt = TREE_CHAIN (elt))
4431 /* We do not handle the case where the index is a RANGE_EXPR,
4432 so the statistic will be somewhat inaccurate.
4433 We do make a more accurate count in store_constructor itself,
4434 so since this function is only used for nested array elements,
4435 this should be close enough. */
4436 if (mostly_zeros_p (TREE_VALUE (elt)))
4437 zeros++;
4438 elts++;
4439 }
4441 return 4 * zeros >= 3 * elts;
4444 return is_zeros_p (exp);
4445 }
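/* A worked instance of the threshold above: a constructor with 16
   elements of which 12 are zero satisfies 4 * 12 >= 3 * 16
   (48 >= 48) and so counts as mostly zero; with only 11 zeros,
   44 < 48, so it does not.  */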
4447 /* Helper function for store_constructor.
4448 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4449 TYPE is the type of the CONSTRUCTOR, not the element type.
4450 CLEARED is as for store_constructor.
4451 ALIAS_SET is the alias set to use for any stores.
4453 This provides a recursive shortcut back to store_constructor when it isn't
4454 necessary to go through store_field. This is so that we can pass through
4455 the cleared field to let store_constructor know that we may not have to
4456 clear a substructure if the outer structure has already been cleared. */
4459 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4460 HOST_WIDE_INT bitpos, enum machine_mode mode,
4461 tree exp, tree type, int cleared, int alias_set)
4463 if (TREE_CODE (exp) == CONSTRUCTOR
4464 && bitpos % BITS_PER_UNIT == 0
4465 /* If we have a nonzero bitpos for a register target, then we just
4466 let store_field do the bitfield handling. This is unlikely to
4467 generate unnecessary clear instructions anyway.  */
4468 && (bitpos == 0 || GET_CODE (target) == MEM))
4470 if (GET_CODE (target) == MEM)
4472 = adjust_address (target,
4473 GET_MODE (target) == BLKmode
4475 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4476 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4479 /* Update the alias set, if required. */
4480 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4481 && MEM_ALIAS_SET (target) != 0)
4483 target = copy_rtx (target);
4484 set_mem_alias_set (target, alias_set);
4487 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4488 }
4489 else
4490 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4491 alias_set);
4492 }
4494 /* Store the value of constructor EXP into the rtx TARGET.
4495 TARGET is either a REG or a MEM; we know it cannot conflict, since
4496 safe_from_p has been called.
4497 CLEARED is true if TARGET is known to have been zeroed.
4498 SIZE is the number of bytes of TARGET we are allowed to modify: this
4499 may not be the same as the size of EXP if we are assigning to a field
4500 which has been packed to exclude padding bits. */
4503 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4505 tree type = TREE_TYPE (exp);
4506 #ifdef WORD_REGISTER_OPERATIONS
4507 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4510 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4511 || TREE_CODE (type) == QUAL_UNION_TYPE)
4515 /* If size is zero or the target is already cleared, do nothing. */
4516 if (size == 0 || cleared)
4518 /* We either clear the aggregate or indicate the value is dead. */
4519 else if ((TREE_CODE (type) == UNION_TYPE
4520 || TREE_CODE (type) == QUAL_UNION_TYPE)
4521 && ! CONSTRUCTOR_ELTS (exp))
4522 /* If the constructor is empty, clear the union. */
4524 clear_storage (target, expr_size (exp));
4528 /* If we are building a static constructor into a register,
4529 set the initial value as zero so we can fold the value into
4530 a constant. But if more than one register is involved,
4531 this probably loses. */
4532 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4533 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4535 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4539 /* If the constructor has fewer fields than the structure
4540 or if we are initializing the structure to mostly zeros,
4541 clear the whole structure first. Don't do this if TARGET is a
4542 register whose mode size isn't equal to SIZE since clear_storage
4543 can't handle this case. */
4544 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4545 || mostly_zeros_p (exp))
4546 && (GET_CODE (target) != REG
4547 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4550 rtx xtarget = target;
4552 if (readonly_fields_p (type))
4554 xtarget = copy_rtx (xtarget);
4555 RTX_UNCHANGING_P (xtarget) = 1;
4558 clear_storage (xtarget, GEN_INT (size));
4563 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4565 /* Store each element of the constructor into
4566 the corresponding field of TARGET. */
4568 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4570 tree field = TREE_PURPOSE (elt);
4571 tree value = TREE_VALUE (elt);
4572 enum machine_mode mode;
4573 HOST_WIDE_INT bitsize;
4574 HOST_WIDE_INT bitpos = 0;
4575 tree offset;
4576 rtx to_rtx = target;
4578 /* Just ignore missing fields.
4579 We cleared the whole structure, above,
4580 if any fields are missing. */
4584 if (cleared && is_zeros_p (value))
4585 continue;
4587 if (host_integerp (DECL_SIZE (field), 1))
4588 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4589 else
4590 bitsize = -1;
4592 mode = DECL_MODE (field);
4593 if (DECL_BIT_FIELD (field))
4594 mode = VOIDmode;
4596 offset = DECL_FIELD_OFFSET (field);
4597 if (host_integerp (offset, 0)
4598 && host_integerp (bit_position (field), 0))
4600 bitpos = int_bit_position (field);
4604 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4611 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4612 make_tree (TREE_TYPE (exp),
4615 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4616 if (GET_CODE (to_rtx) != MEM)
4617 abort ();
4619 #ifdef POINTERS_EXTEND_UNSIGNED
4620 if (GET_MODE (offset_rtx) != Pmode)
4621 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4623 if (GET_MODE (offset_rtx) != ptr_mode)
4624 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4627 to_rtx = offset_address (to_rtx, offset_rtx,
4628 highest_pow2_factor (offset));
4631 if (TREE_READONLY (field))
4633 if (GET_CODE (to_rtx) == MEM)
4634 to_rtx = copy_rtx (to_rtx);
4636 RTX_UNCHANGING_P (to_rtx) = 1;
4639 #ifdef WORD_REGISTER_OPERATIONS
4640 /* If this initializes a field that is smaller than a word, at the
4641 start of a word, try to widen it to a full word.
4642 This special case allows us to output C++ member function
4643 initializations in a form that the optimizers can understand. */
4644 if (GET_CODE (target) == REG
4645 && bitsize < BITS_PER_WORD
4646 && bitpos % BITS_PER_WORD == 0
4647 && GET_MODE_CLASS (mode) == MODE_INT
4648 && TREE_CODE (value) == INTEGER_CST
4650 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4652 tree type = TREE_TYPE (value);
4654 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4656 type = lang_hooks.types.type_for_size
4657 (BITS_PER_WORD, TREE_UNSIGNED (type));
4658 value = convert (type, value);
4661 if (BYTES_BIG_ENDIAN)
4662 value
4663 = fold (build (LSHIFT_EXPR, type, value,
4664 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4665 bitsize = BITS_PER_WORD;
4666 mode = word_mode;
4670 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4671 && DECL_NONADDRESSABLE_P (field))
4673 to_rtx = copy_rtx (to_rtx);
4674 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4677 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4678 value, type, cleared,
4679 get_alias_set (TREE_TYPE (field)));
4682 else if (TREE_CODE (type) == ARRAY_TYPE
4683 || TREE_CODE (type) == VECTOR_TYPE)
4688 tree domain = TYPE_DOMAIN (type);
4689 tree elttype = TREE_TYPE (type);
4691 HOST_WIDE_INT minelt = 0;
4692 HOST_WIDE_INT maxelt = 0;
4696 unsigned n_elts = 0;
4698 /* Vectors are like arrays, but the domain is stored via an array
4699 type indirectly.  */
4700 if (TREE_CODE (type) == VECTOR_TYPE)
4702 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4703 the same field as TYPE_DOMAIN, we are not guaranteed that
4704 it always will.  */
4705 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4706 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4707 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4709 enum machine_mode mode = GET_MODE (target);
4711 icode = (int) vec_init_optab->handlers[mode].insn_code;
4712 if (icode != CODE_FOR_nothing)
4716 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4717 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4718 vector = alloca (n_elts * sizeof (rtx));
4719 for (i = 0; i < n_elts; i++)
4720 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4725 const_bounds_p = (TYPE_MIN_VALUE (domain)
4726 && TYPE_MAX_VALUE (domain)
4727 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4728 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4730 /* If we have constant bounds for the range of the type, get them. */
4733 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4734 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4737 /* If the constructor has fewer elements than the array,
4738 clear the whole array first. Similarly if this is
4739 a static constructor of a non-BLKmode object.  */
4740 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4741 need_to_clear = 1;
4742 else
4743 {
4744 HOST_WIDE_INT count = 0, zero_count = 0;
4745 need_to_clear = ! const_bounds_p;
4747 /* This loop is a more accurate version of the loop in
4748 mostly_zeros_p (it handles RANGE_EXPR in an index).
4749 It is also needed to check for missing elements. */
4750 for (elt = CONSTRUCTOR_ELTS (exp);
4751 elt != NULL_TREE && ! need_to_clear;
4752 elt = TREE_CHAIN (elt))
4754 tree index = TREE_PURPOSE (elt);
4755 HOST_WIDE_INT this_node_count;
4757 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4759 tree lo_index = TREE_OPERAND (index, 0);
4760 tree hi_index = TREE_OPERAND (index, 1);
4762 if (! host_integerp (lo_index, 1)
4763 || ! host_integerp (hi_index, 1))
4764 {
4765 need_to_clear = 1;
4766 break;
4767 }
4769 this_node_count = (tree_low_cst (hi_index, 1)
4770 - tree_low_cst (lo_index, 1) + 1);
4772 else
4773 this_node_count = 1;
4775 count += this_node_count;
4776 if (mostly_zeros_p (TREE_VALUE (elt)))
4777 zero_count += this_node_count;
4780 /* Clear the entire array first if there are any missing elements,
4781 or if the incidence of zero elements is >= 75%. */
4782 if (! need_to_clear
4783 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4784 need_to_clear = 1;
4787 if (need_to_clear && size > 0 && !vector)
4791 if (REG_P (target))
4792 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4793 else
4794 clear_storage (target, GEN_INT (size));
4798 else if (REG_P (target))
4799 /* Inform later passes that the old value is dead. */
4800 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4802 /* Store each element of the constructor into
4803 the corresponding element of TARGET, determined
4804 by counting the elements. */
4805 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4806 elt;
4807 elt = TREE_CHAIN (elt), i++)
4809 enum machine_mode mode;
4810 HOST_WIDE_INT bitsize;
4811 HOST_WIDE_INT bitpos;
4812 int unsignedp;
4813 tree value = TREE_VALUE (elt);
4814 tree index = TREE_PURPOSE (elt);
4815 rtx xtarget = target;
4817 if (cleared && is_zeros_p (value))
4818 continue;
4820 unsignedp = TREE_UNSIGNED (elttype);
4821 mode = TYPE_MODE (elttype);
4822 if (mode == BLKmode)
4823 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4824 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4825 : -1);
4826 else
4827 bitsize = GET_MODE_BITSIZE (mode);
4829 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4831 tree lo_index = TREE_OPERAND (index, 0);
4832 tree hi_index = TREE_OPERAND (index, 1);
4833 rtx index_r, pos_rtx, loop_end;
4834 struct nesting *loop;
4835 HOST_WIDE_INT lo, hi, count;
4841 /* If the range is constant and "small", unroll the loop. */
4842 if (const_bounds_p
4843 && host_integerp (lo_index, 0)
4844 && host_integerp (hi_index, 0)
4845 && (lo = tree_low_cst (lo_index, 0),
4846 hi = tree_low_cst (hi_index, 0),
4847 count = hi - lo + 1,
4848 (GET_CODE (target) != MEM
4849 || count <= 2
4850 || (host_integerp (TYPE_SIZE (elttype), 1)
4851 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4854 lo -= minelt; hi -= minelt;
4855 for (; lo <= hi; lo++)
4857 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4859 if (GET_CODE (target) == MEM
4860 && !MEM_KEEP_ALIAS_SET_P (target)
4861 && TREE_CODE (type) == ARRAY_TYPE
4862 && TYPE_NONALIASED_COMPONENT (type))
4864 target = copy_rtx (target);
4865 MEM_KEEP_ALIAS_SET_P (target) = 1;
4868 store_constructor_field
4869 (target, bitsize, bitpos, mode, value, type, cleared,
4870 get_alias_set (elttype));
4875 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4876 loop_end = gen_label_rtx ();
4878 unsignedp = TREE_UNSIGNED (domain);
4880 index = build_decl (VAR_DECL, NULL_TREE, domain);
4883 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4884 &unsignedp, 0));
4885 SET_DECL_RTL (index, index_r);
4886 if (TREE_CODE (value) == SAVE_EXPR
4887 && SAVE_EXPR_RTL (value) == 0)
4889 /* Make sure value gets expanded once before the
4890 loop.  */
4891 expand_expr (value, const0_rtx, VOIDmode, 0);
4894 store_expr (lo_index, index_r, 0);
4895 loop = expand_start_loop (0);
4897 /* Assign value to element index. */
4898 position
4899 = convert (ssizetype,
4900 fold (build (MINUS_EXPR, TREE_TYPE (index),
4901 index, TYPE_MIN_VALUE (domain))));
4902 position = size_binop (MULT_EXPR, position,
4903 convert (ssizetype,
4904 TYPE_SIZE_UNIT (elttype)));
4906 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4907 xtarget = offset_address (target, pos_rtx,
4908 highest_pow2_factor (position));
4909 xtarget = adjust_address (xtarget, mode, 0);
4910 if (TREE_CODE (value) == CONSTRUCTOR)
4911 store_constructor (value, xtarget, cleared,
4912 bitsize / BITS_PER_UNIT);
4913 else
4914 store_expr (value, xtarget, 0);
4916 expand_exit_loop_if_false (loop,
4917 build (LT_EXPR, integer_type_node,
4918 index, hi_index));
4920 expand_increment (build (PREINCREMENT_EXPR,
4921 TREE_TYPE (index),
4922 index, integer_one_node), 0, 0);
4923 expand_end_loop ();
4924 emit_label (loop_end);
4927 else if ((index != 0 && ! host_integerp (index, 0))
4928 || ! host_integerp (TYPE_SIZE (elttype), 1))
4936 index = ssize_int (i);
4938 if (minelt)
4939 index = convert (ssizetype,
4940 fold (build (MINUS_EXPR, index,
4941 TYPE_MIN_VALUE (domain))));
4943 position = size_binop (MULT_EXPR, index,
4944 convert (ssizetype,
4945 TYPE_SIZE_UNIT (elttype)));
4946 xtarget = offset_address (target,
4947 expand_expr (position, 0, VOIDmode, 0),
4948 highest_pow2_factor (position));
4949 xtarget = adjust_address (xtarget, mode, 0);
4950 store_expr (value, xtarget, 0);
4957 pos = tree_low_cst (index, 0) - minelt;
4958 else
4959 pos = i;
4960 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4965 bitpos = ((tree_low_cst (index, 0) - minelt)
4966 * tree_low_cst (TYPE_SIZE (elttype), 1));
4967 else
4968 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4970 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4971 && TREE_CODE (type) == ARRAY_TYPE
4972 && TYPE_NONALIASED_COMPONENT (type))
4974 target = copy_rtx (target);
4975 MEM_KEEP_ALIAS_SET_P (target) = 1;
4977 store_constructor_field (target, bitsize, bitpos, mode, value,
4978 type, cleared, get_alias_set (elttype));
4982 if (vector)
4983 emit_insn (GEN_FCN (icode) (target,
4984 gen_rtx_PARALLEL (GET_MODE (target),
4985 gen_rtvec_v (n_elts, vector))));
4989 /* Set constructor assignments. */
4990 else if (TREE_CODE (type) == SET_TYPE)
4992 tree elt = CONSTRUCTOR_ELTS (exp);
4993 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4994 tree domain = TYPE_DOMAIN (type);
4995 tree domain_min, domain_max, bitlength;
4997 /* The default implementation strategy is to extract the constant
4998 parts of the constructor, use that to initialize the target,
4999 and then "or" in whatever non-constant ranges we need in addition.
5001 If a large set is all zero or all ones, it is
5002 probably better to set it using memset (if available) or bzero.
5003 Also, if a large set has just a single range, it may also be
5004 better to first clear the whole set (using
5005 bzero/memset), and then set the bits we want.  */
5007 /* Check for all zeros. */
5008 if (elt == NULL_TREE && size > 0)
5010 if (!cleared)
5011 clear_storage (target, GEN_INT (size));
5012 return;
5015 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5016 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5017 bitlength = size_binop (PLUS_EXPR,
5018 size_diffop (domain_max, domain_min),
5021 nbits = tree_low_cst (bitlength, 1);
5023 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5024 are "complicated" (more than one range), initialize (the
5025 constant parts) by copying from a constant. */
5026 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5027 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5029 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5030 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5031 char *bit_buffer = alloca (nbits);
5032 HOST_WIDE_INT word = 0;
5033 unsigned int bit_pos = 0;
5034 unsigned int ibit = 0;
5035 unsigned int offset = 0; /* In bytes from beginning of set. */
5037 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5040 if (bit_buffer[ibit])
5042 if (BYTES_BIG_ENDIAN)
5043 word |= (1 << (set_word_size - 1 - bit_pos));
5045 word |= 1 << bit_pos;
5049 if (bit_pos >= set_word_size || ibit == nbits)
5051 if (word != 0 || ! cleared)
5053 rtx datum = GEN_INT (word);
5056 /* The assumption here is that it is safe to use
5057 XEXP if the set is multi-word, but not if
5058 it's single-word. */
5059 if (GET_CODE (target) == MEM)
5060 to_rtx = adjust_address (target, mode, offset);
5061 else if (offset == 0)
5065 emit_move_insn (to_rtx, datum);
5072 offset += set_word_size / BITS_PER_UNIT;
5077 /* Don't bother clearing storage if the set is all ones. */
5078 if (TREE_CHAIN (elt) != NULL_TREE
5079 || (TREE_PURPOSE (elt) == NULL_TREE
5081 : ( ! host_integerp (TREE_VALUE (elt), 0)
5082 || ! host_integerp (TREE_PURPOSE (elt), 0)
5083 || (tree_low_cst (TREE_VALUE (elt), 0)
5084 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5085 != (HOST_WIDE_INT) nbits))))
5086 clear_storage (target, expr_size (exp));
5088 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5090 /* Start of range of element or NULL. */
5091 tree startbit = TREE_PURPOSE (elt);
5092 /* End of range of element, or element value. */
5093 tree endbit = TREE_VALUE (elt);
5094 HOST_WIDE_INT startb, endb;
5095 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5097 bitlength_rtx = expand_expr (bitlength,
5098 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5100 /* Handle non-range tuple element like [ expr ]. */
5101 if (startbit == NULL_TREE)
5103 startbit = save_expr (endbit);
5107 startbit = convert (sizetype, startbit);
5108 endbit = convert (sizetype, endbit);
5109 if (! integer_zerop (domain_min))
5111 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5112 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5114 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5115 EXPAND_CONST_ADDRESS);
5116 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5117 EXPAND_CONST_ADDRESS);
5123 ((build_qualified_type (lang_hooks.types.type_for_mode
5124 (GET_MODE (target), 0),
5127 emit_move_insn (targetx, target);
5130 else if (GET_CODE (target) == MEM)
5135 /* Optimization: If startbit and endbit are constants divisible
5136 by BITS_PER_UNIT, call memset instead. */
5137 if (TARGET_MEM_FUNCTIONS
5138 && TREE_CODE (startbit) == INTEGER_CST
5139 && TREE_CODE (endbit) == INTEGER_CST
5140 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5141 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5143 emit_library_call (memset_libfunc, LCT_NORMAL,
5145 plus_constant (XEXP (targetx, 0),
5146 startb / BITS_PER_UNIT),
5148 constm1_rtx, TYPE_MODE (integer_type_node),
5149 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5150 TYPE_MODE (sizetype));
5153 emit_library_call (setbits_libfunc, LCT_NORMAL,
5154 VOIDmode, 4, XEXP (targetx, 0),
5155 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5156 startbit_rtx, TYPE_MODE (sizetype),
5157 endbit_rtx, TYPE_MODE (sizetype));
5160 emit_move_insn (target, targetx);
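/* A minimal standalone sketch of the constant-part bit packing used
   above, assuming a little-endian bit order (the BYTES_BIG_ENDIAN
   branch mirrors the bit position); it packs a 0/1 byte buffer into
   host words the same way the loop over BIT_BUFFER does.  */
#if 0
static void
pack_set_bits (const char *bit_buffer, unsigned int nbits,
	       unsigned int word_bits, HOST_WIDE_INT *words)
{
  HOST_WIDE_INT word = 0;
  unsigned int bit_pos = 0, ibit = 0, w = 0;

  while (ibit < nbits)
    {
      if (bit_buffer[ibit])
	word |= (HOST_WIDE_INT) 1 << bit_pos;
      bit_pos++, ibit++;
      if (bit_pos >= word_bits || ibit == nbits)
	{
	  /* Flush the completed (or final partial) word.  */
	  words[w++] = word;
	  word = 0;
	  bit_pos = 0;
	}
    }
}
#endif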
5168 /* Store the value of EXP (an expression tree)
5169 into a subfield of TARGET which has mode MODE and occupies
5170 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5171 If MODE is VOIDmode, it means that we are storing into a bit-field.
5173 If VALUE_MODE is VOIDmode, return nothing in particular.
5174 UNSIGNEDP is not used in this case.
5176 Otherwise, return an rtx for the value stored. This rtx
5177 has mode VALUE_MODE if that is convenient to do.
5178 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5180 TYPE is the type of the underlying object,
5182 ALIAS_SET is the alias set for the destination. This value will
5183 (in general) be different from that for TARGET, since TARGET is a
5184 reference to the containing structure. */
5187 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5188 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5189 int unsignedp, tree type, int alias_set)
5191 HOST_WIDE_INT width_mask = 0;
5193 if (TREE_CODE (exp) == ERROR_MARK)
5194 return const0_rtx;
5196 /* If we have nothing to store, do nothing unless the expression has
5197 side-effects.  */
5198 if (bitsize == 0)
5199 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5200 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5201 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5203 /* If we are storing into an unaligned field of an aligned union that is
5204 in a register, we may have the mode of TARGET being an integer mode but
5205 MODE == BLKmode. In that case, get an aligned object whose size and
5206 alignment are the same as TARGET and store TARGET into it (we can avoid
5207 the store if the field being stored is the entire width of TARGET). Then
5208 call ourselves recursively to store the field into a BLKmode version of
5209 that object. Finally, load from the object into TARGET. This is not
5210 very efficient in general, but should only be slightly more expensive
5211 than the otherwise-required unaligned accesses. Perhaps this can be
5212 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5213 twice, once with emit_move_insn and once via store_field. */
5215 if (mode == BLKmode
5216 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5218 rtx object = assign_temp (type, 0, 1, 1);
5219 rtx blk_object = adjust_address (object, BLKmode, 0);
5221 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5222 emit_move_insn (object, target);
5224 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5227 emit_move_insn (target, object);
5229 /* We want to return the BLKmode version of the data. */
5233 if (GET_CODE (target) == CONCAT)
5235 /* We're storing into a struct containing a single __complex. */
5239 return store_expr (exp, target, 0);
5242 /* If the structure is in a register or if the component
5243 is a bit field, we cannot use addressing to access it.
5244 Use bit-field techniques or SUBREG to store in it. */
5246 if (mode == VOIDmode
5247 || (mode != BLKmode && ! direct_store[(int) mode]
5248 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5249 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5250 || GET_CODE (target) == REG
5251 || GET_CODE (target) == SUBREG
5252 /* If the field isn't aligned enough to store as an ordinary memref,
5253 store it as a bit field. */
5255 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5256 || bitpos % GET_MODE_ALIGNMENT (mode))
5257 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5258 || (bitpos % BITS_PER_UNIT != 0)))
5259 /* If the RHS and field are a constant size and the size of the
5260 RHS isn't the same size as the bitfield, we must use bitfield
5261 operations.  */
5262 || (bitsize >= 0
5263 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5264 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5266 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5268 /* If BITSIZE is narrower than the size of the type of EXP
5269 we will be narrowing TEMP. Normally, what's wanted are the
5270 low-order bits. However, if EXP's type is a record and this is
5271 a big-endian machine, we want the upper BITSIZE bits.  */
5272 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5273 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5274 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5275 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5276 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5277 - bitsize),
5278 temp, 1);
5280 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5281 MODE.  */
5282 if (mode != VOIDmode && mode != BLKmode
5283 && mode != TYPE_MODE (TREE_TYPE (exp)))
5284 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5286 /* If the modes of TARGET and TEMP are both BLKmode, both
5287 must be in memory and BITPOS must be aligned on a byte
5288 boundary. If so, we simply do a block copy. */
5289 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5291 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5292 || bitpos % BITS_PER_UNIT != 0)
5293 abort ();
5295 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5296 emit_block_move (target, temp,
5297 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5301 return value_mode == VOIDmode ? const0_rtx : target;
5304 /* Store the value in the bitfield. */
5305 store_bit_field (target, bitsize, bitpos, mode, temp,
5306 int_size_in_bytes (type));
5308 if (value_mode != VOIDmode)
5310 /* The caller wants an rtx for the value.
5311 If possible, avoid refetching from the bitfield itself. */
5312 if (width_mask != 0
5313 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5316 enum machine_mode tmode;
5318 tmode = GET_MODE (temp);
5319 if (tmode == VOIDmode)
5320 tmode = value_mode;
5322 if (unsignedp)
5323 return expand_and (tmode, temp,
5324 gen_int_mode (width_mask, tmode),
5327 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5328 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5329 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5332 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5333 NULL_RTX, value_mode, VOIDmode,
5334 int_size_in_bytes (type));
5340 rtx addr = XEXP (target, 0);
5341 rtx to_rtx = target;
5343 /* If a value is wanted, it must be the lhs;
5344 so make the address stable for multiple use. */
5346 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5347 && ! CONSTANT_ADDRESS_P (addr)
5348 /* A frame-pointer reference is already stable. */
5349 && ! (GET_CODE (addr) == PLUS
5350 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5351 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5352 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5353 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5355 /* Now build a reference to just the desired component. */
5357 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5359 if (to_rtx == target)
5360 to_rtx = copy_rtx (to_rtx);
5362 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5363 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5364 set_mem_alias_set (to_rtx, alias_set);
5366 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5367 }
5368 }
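/* An illustrative call of the interface above, with hypothetical
   values: store EXP as an 8-bit bit-field 16 bits into TARGET (MODE
   of VOIDmode requests bit-field techniques) and discard the result
   (VALUE_MODE of VOIDmode).  */
#if 0
  store_field (target, 8, 16, VOIDmode, exp, VOIDmode, 0,
	       TREE_TYPE (exp), get_alias_set (TREE_TYPE (exp)));
#endif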
5370 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5371 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5372 codes and find the ultimate containing object, which we return.
5374 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5375 bit position, and *PUNSIGNEDP to the signedness of the field.
5376 If the position of the field is variable, we store a tree
5377 giving the variable offset (in units) in *POFFSET.
5378 This offset is in addition to the bit position.
5379 If the position is not variable, we store 0 in *POFFSET.
5381 If any of the extraction expressions is volatile,
5382 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5384 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5385 is a mode that can be used to access the field.  In that case, *PBITSIZE
5386 is redundant.
5388 If the field describes a variable-sized object, *PMODE is set to
5389 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5390 this case, but the address of the object can be found. */
5393 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5394 HOST_WIDE_INT *pbitpos, tree *poffset,
5395 enum machine_mode *pmode, int *punsignedp,
5399 enum machine_mode mode = VOIDmode;
5400 tree offset = size_zero_node;
5401 tree bit_offset = bitsize_zero_node;
5404 /* First get the mode, signedness, and size. We do this from just the
5405 outermost expression. */
5406 if (TREE_CODE (exp) == COMPONENT_REF)
5408 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5409 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5410 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5412 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5414 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5416 size_tree = TREE_OPERAND (exp, 1);
5417 *punsignedp = TREE_UNSIGNED (exp);
5421 mode = TYPE_MODE (TREE_TYPE (exp));
5422 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5424 if (mode == BLKmode)
5425 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5427 *pbitsize = GET_MODE_BITSIZE (mode);
5432 if (! host_integerp (size_tree, 1))
5433 mode = BLKmode, *pbitsize = -1;
5434 else
5435 *pbitsize = tree_low_cst (size_tree, 1);
5438 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5439 and find the ultimate containing object. */
5442 if (TREE_CODE (exp) == BIT_FIELD_REF)
5443 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5444 else if (TREE_CODE (exp) == COMPONENT_REF)
5446 tree field = TREE_OPERAND (exp, 1);
5447 tree this_offset = DECL_FIELD_OFFSET (field);
5449 /* If this field hasn't been filled in yet, don't go
5450 past it. This should only happen when folding expressions
5451 made during type construction. */
5452 if (this_offset == 0)
5453 break;
5455 this_offset = SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset, exp);
5457 offset = size_binop (PLUS_EXPR, offset, this_offset);
5458 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5459 DECL_FIELD_BIT_OFFSET (field));
5461 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5464 else if (TREE_CODE (exp) == ARRAY_REF
5465 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5467 tree index = TREE_OPERAND (exp, 1);
5468 tree array = TREE_OPERAND (exp, 0);
5469 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5470 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5471 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5473 /* We assume all arrays have sizes that are a multiple of a byte.
5474 First subtract the lower bound, if any, in the type of the
5475 index, then convert to sizetype and multiply by the size of the
5476 array element.  */
5477 if (low_bound != 0 && ! integer_zerop (low_bound))
5478 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5479 index, low_bound));
5481 /* If the index has a self-referential type, instantiate it with
5482 the object; likewise for the component size.  */
5483 index = SUBSTITUTE_PLACEHOLDER_IN_EXPR (index, exp);
5484 unit_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size, array);
5485 offset = size_binop (PLUS_EXPR, offset,
5486 size_binop (MULT_EXPR,
5487 convert (sizetype, index),
5491 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5492 conversions that don't change the mode, and all view conversions
5493 except those that need to "step up" the alignment. */
5494 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5495 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5496 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5497 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5499 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5500 < BIGGEST_ALIGNMENT)
5501 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5502 || TYPE_ALIGN_OK (TREE_TYPE
5503 (TREE_OPERAND (exp, 0))))))
5504 && ! ((TREE_CODE (exp) == NOP_EXPR
5505 || TREE_CODE (exp) == CONVERT_EXPR)
5506 && (TYPE_MODE (TREE_TYPE (exp))
5507 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5510 /* If any reference in the chain is volatile, the effect is volatile. */
5511 if (TREE_THIS_VOLATILE (exp))
5514 exp = TREE_OPERAND (exp, 0);
5517 /* If OFFSET is constant, see if we can return the whole thing as a
5518 constant bit position. Otherwise, split it up. */
5519 if (host_integerp (offset, 0)
5520 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5521 bitsize_unit_node))
5522 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5523 && host_integerp (tem, 0))
5524 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5525 else
5526 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5528 return exp;
5529 }
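/* A hypothetical use of the decomposition above, assuming EXP is a
   COMPONENT_REF built elsewhere: recover the containing object and
   the field's position, then test whether it is a bit-field.  */
#if 0
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode1;
  int unsignedp, volatilep = 0;
  tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				   &mode1, &unsignedp, &volatilep);

  if (mode1 == VOIDmode)
    ; /* Bit-field: access must go through the bit-field routines.  */
#endif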
5532 /* Return 1 if T is an expression that get_inner_reference handles. */
5535 handled_component_p (tree t)
5537 switch (TREE_CODE (t))
5542 case ARRAY_RANGE_REF:
5543 case NON_LVALUE_EXPR:
5544 case VIEW_CONVERT_EXPR:
5545 return 1;
5547 /* ??? Sure they are handled, but get_inner_reference may return
5548 a different PBITSIZE, depending upon whether the expression is
5549 wrapped up in a NOP_EXPR or not, e.g. for bitfields.  */
5550 case NOP_EXPR:
5551 case CONVERT_EXPR:
5552 return (TYPE_MODE (TREE_TYPE (t))
5553 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5560 /* Given an rtx VALUE that may contain additions and multiplications, return
5561 an equivalent value that just refers to a register, memory, or constant.
5562 This is done by generating instructions to perform the arithmetic and
5563 returning a pseudo-register containing the value.
5565 The returned value may be a REG, SUBREG, MEM or constant. */
5568 force_operand (rtx value, rtx target)
5571 /* Use subtarget as the target for operand 0 of a binary operation. */
5572 rtx subtarget = get_subtarget (target);
5573 enum rtx_code code = GET_CODE (value);
5575 /* Check for subreg applied to an expression produced by loop optimizer. */
5576 if (code == SUBREG
5577 && GET_CODE (SUBREG_REG (value)) != REG
5578 && GET_CODE (SUBREG_REG (value)) != MEM)
5580 value = simplify_gen_subreg (GET_MODE (value),
5581 force_reg (GET_MODE (SUBREG_REG (value)),
5582 force_operand (SUBREG_REG (value),
5584 GET_MODE (SUBREG_REG (value)),
5585 SUBREG_BYTE (value));
5586 code = GET_CODE (value);
5589 /* Check for a PIC address load. */
5590 if ((code == PLUS || code == MINUS)
5591 && XEXP (value, 0) == pic_offset_table_rtx
5592 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5593 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5594 || GET_CODE (XEXP (value, 1)) == CONST))
5596 if (!subtarget)
5597 subtarget = gen_reg_rtx (GET_MODE (value));
5598 emit_move_insn (subtarget, value);
5599 return subtarget;
5600 }
5602 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5604 if (!target)
5605 target = gen_reg_rtx (GET_MODE (value));
5606 convert_move (target, force_operand (XEXP (value, 0), NULL),
5607 code == ZERO_EXTEND);
5608 return target;
5609 }
5611 if (ARITHMETIC_P (value))
5613 op2 = XEXP (value, 1);
5614 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5615 subtarget = 0;
5616 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5617 {
5618 code = PLUS;
5619 op2 = negate_rtx (GET_MODE (value), op2);
5620 }
5622 /* Check for an addition with OP2 a constant integer and our first
5623 operand a PLUS of a virtual register and something else. In that
5624 case, we want to emit the sum of the virtual register and the
5625 constant first and then add the other value. This allows virtual
5626 register instantiation to simply modify the constant rather than
5627 creating another one around this addition. */
5628 if (code == PLUS && GET_CODE (op2) == CONST_INT
5629 && GET_CODE (XEXP (value, 0)) == PLUS
5630 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5631 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5632 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5634 rtx temp = expand_simple_binop (GET_MODE (value), code,
5635 XEXP (XEXP (value, 0), 0), op2,
5636 subtarget, 0, OPTAB_LIB_WIDEN);
5637 return expand_simple_binop (GET_MODE (value), code, temp,
5638 force_operand (XEXP (XEXP (value,
5640 target, 0, OPTAB_LIB_WIDEN);
5643 op1 = force_operand (XEXP (value, 0), subtarget);
5644 op2 = force_operand (op2, NULL_RTX);
5645 switch (code)
5646 {
5647 case MULT:
5648 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5649 case DIV:
5650 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5651 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5652 target, 1, OPTAB_LIB_WIDEN);
5654 return expand_divmod (0,
5655 FLOAT_MODE_P (GET_MODE (value))
5656 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5657 GET_MODE (value), op1, op2, target, 0);
5659 case MOD:
5660 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5661 target, 0);
5663 case UDIV:
5664 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5665 target, 1);
5667 case UMOD:
5668 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5669 target, 1);
5671 case ASHIFTRT:
5672 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5673 target, 0, OPTAB_LIB_WIDEN);
5675 default:
5676 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5677 target, 1, OPTAB_LIB_WIDEN);
5680 if (UNARY_P (value))
5682 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5683 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5686 #ifdef INSN_SCHEDULING
5687 /* On machines that have insn scheduling, we want all memory references to be
5688 explicit, so we need to deal with such paradoxical SUBREGs. */
5689 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5690 && (GET_MODE_SIZE (GET_MODE (value))
5691 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5692 value
5693 = simplify_gen_subreg (GET_MODE (value),
5694 force_reg (GET_MODE (SUBREG_REG (value)),
5695 force_operand (SUBREG_REG (value),
5697 GET_MODE (SUBREG_REG (value)),
5698 SUBREG_BYTE (value));
5699 #endif
5701 return value;
5702 }
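/* A hypothetical example for force_operand: collapse an address
   computation such as (plus (mult index 4) base) into a single
   pseudo-register; INDEX_REG and BASE_REG are assumed to be pseudos
   created elsewhere.  */
#if 0
  rtx addr = gen_rtx_PLUS (Pmode,
			   gen_rtx_MULT (Pmode, index_reg, GEN_INT (4)),
			   base_reg);
  rtx reg = force_operand (addr, NULL_RTX);
#endif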
5704 /* Subroutine of expand_expr: return nonzero iff there is no way that
5705 EXP can reference X, which is being modified. TOP_P is nonzero if this
5706 call is going to be used to determine whether we need a temporary
5707 for EXP, as opposed to a recursive call to this function.
5709 It is always safe for this routine to return zero since it merely
5710 searches for optimization opportunities. */
5713 safe_from_p (rtx x, tree exp, int top_p)
5717 static tree save_expr_list;
5719 if (x == 0
5720 /* If EXP has varying size, we MUST use a target since we currently
5721 have no way of allocating temporaries of variable size
5722 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5723 So we assume here that something at a higher level has prevented a
5724 clash. This is somewhat bogus, but the best we can do. Only
5725 do this when X is BLKmode and when we are at the top level. */
5726 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5727 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5728 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5729 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5730 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5732 && GET_MODE (x) == BLKmode)
5733 /* If X is in the outgoing argument area, it is always safe. */
5734 || (GET_CODE (x) == MEM
5735 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5736 || (GET_CODE (XEXP (x, 0)) == PLUS
5737 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5738 return 1;
5740 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5741 find the underlying pseudo. */
5742 if (GET_CODE (x) == SUBREG)
5743 {
5744 x = SUBREG_REG (x);
5745 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5746 return 0;
5747 }
5749 /* A SAVE_EXPR might appear many times in the expression passed to the
5750 top-level safe_from_p call, and if it has a complex subexpression,
5751 examining it multiple times could result in a combinatorial explosion.
5752 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5753 with optimization took about 28 minutes to compile -- even though it was
5754 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5755 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5756 we have processed. Note that the only test of top_p was above. */
5765 rtn = safe_from_p (x, exp, 0);
5767 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5768 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5773 /* Now look at our tree code and possibly recurse. */
5774 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5775 {
5776 case 'd':
5777 exp_rtl = DECL_RTL_IF_SET (exp);
5778 break;
5784 if (TREE_CODE (exp) == TREE_LIST)
5788 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5790 exp = TREE_CHAIN (exp);
5793 if (TREE_CODE (exp) != TREE_LIST)
5794 return safe_from_p (x, exp, 0);
5797 else if (TREE_CODE (exp) == ERROR_MARK)
5798 return 1; /* An already-visited SAVE_EXPR? */
5804 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5805 return 0;
5809 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5813 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5814 the expression. If it is set, we conflict iff we are that rtx or
5815 both are in memory. Otherwise, we check all operands of the
5816 expression recursively. */
5818 switch (TREE_CODE (exp))
5821 /* If the operand is static or we are static, we can't conflict.
5822 Likewise if we don't conflict with the operand at all. */
5823 if (staticp (TREE_OPERAND (exp, 0))
5824 || TREE_STATIC (exp)
5825 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5826 return 1;
5828 /* Otherwise, the only way this can conflict is if we are taking
5829 the address of a DECL whose address is part of X, which is
5830 very rare.  */
5831 exp = TREE_OPERAND (exp, 0);
5834 if (!DECL_RTL_SET_P (exp)
5835 || GET_CODE (DECL_RTL (exp)) != MEM)
5836 return 0;
5838 exp_rtl = XEXP (DECL_RTL (exp), 0);
5843 if (GET_CODE (x) == MEM
5844 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5845 get_alias_set (exp)))
5846 return 0;
5847 break;
5849 case CALL_EXPR:
5850 /* Assume that the call will clobber all hard registers and
5851 all of memory.  */
5852 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5853 || GET_CODE (x) == MEM)
5854 return 0;
5855 break;
5857 case RTL_EXPR:
5858 /* If a sequence exists, we would have to scan every instruction
5859 in the sequence to see if it was safe.  This is probably not
5860 worthwhile.  */
5861 if (RTL_EXPR_SEQUENCE (exp))
5862 return 0;
5864 exp_rtl = RTL_EXPR_RTL (exp);
5865 break;
5867 case WITH_CLEANUP_EXPR:
5868 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5869 break;
5871 case CLEANUP_POINT_EXPR:
5872 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5874 case SAVE_EXPR:
5875 exp_rtl = SAVE_EXPR_RTL (exp);
5876 if (exp_rtl)
5877 break;
5879 /* If we've already scanned this, don't do it again. Otherwise,
5880 show we've scanned it and record for clearing the flag if we're
5881 going on.  */
5882 if (TREE_PRIVATE (exp))
5883 return 1;
5885 TREE_PRIVATE (exp) = 1;
5886 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5888 TREE_PRIVATE (exp) = 0;
5889 return 0;
5890 }
5892 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5893 return 1;
5895 case BIND_EXPR:
5896 /* The only operand we look at is operand 1.  The rest aren't
5897 part of the expression. */
5898 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5904 /* If we have an rtx, we do not need to scan our operands. */
5908 nops = first_rtl_op (TREE_CODE (exp));
5909 for (i = 0; i < nops; i++)
5910 if (TREE_OPERAND (exp, i) != 0
5911 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5912 return 0;
5914 /* If this is a language-specific tree code, it may require
5915 special handling. */
5916 if ((unsigned int) TREE_CODE (exp)
5917 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5918 && !lang_hooks.safe_from_p (x, exp))
5919 return 0;
5922 /* If we have an rtl, find any enclosed object.  Then see if we conflict
5923 with it.  */
5924 if (exp_rtl)
5925 {
5926 if (GET_CODE (exp_rtl) == SUBREG)
5928 exp_rtl = SUBREG_REG (exp_rtl);
5929 if (GET_CODE (exp_rtl) == REG
5930 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5931 return 0;
5932 }
5934 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5935 are memory and they conflict. */
5936 return ! (rtx_equal_p (x, exp_rtl)
5937 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5938 && true_dependence (exp_rtl, VOIDmode, x,
5939 rtx_addr_varies_p)));
5942 /* If we reach here, it is safe.  */
5943 return 1;
5944 }
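/* The typical pattern for the predicate above, assuming MODE, TARGET
   and EXP come from an enclosing expander: fall back to a fresh
   pseudo when TARGET might be referenced by EXP.  */
#if 0
  if (target == 0 || ! safe_from_p (target, exp, 1))
    target = gen_reg_rtx (mode);
#endif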
5946 /* Subroutine of expand_expr: return rtx if EXP is a
5947 variable or parameter; else return 0. */
5953 switch (TREE_CODE (exp))
5954 {
5955 case PARM_DECL:
5956 case VAR_DECL:
5957 return DECL_RTL (exp);
5963 /* Return the highest power of two that EXP is known to be a multiple of.
5964 This is used in updating alignment of MEMs in array references. */
5966 static unsigned HOST_WIDE_INT
5967 highest_pow2_factor (tree exp)
5969 unsigned HOST_WIDE_INT c0, c1;
5971 switch (TREE_CODE (exp))
5974 /* We can find the lowest bit that's a one. If the low
5975 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5976 We need to handle this case since we can find it in a COND_EXPR,
5977 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5978 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5979 later ICE.  */
5980 if (TREE_CONSTANT_OVERFLOW (exp))
5981 return BIGGEST_ALIGNMENT;
5984 /* Note: tree_low_cst is intentionally not used here,
5985 we don't care about the upper bits. */
5986 c0 = TREE_INT_CST_LOW (exp);
5987 c0 &= -c0;
5988 return c0 ? c0 : BIGGEST_ALIGNMENT;
5992 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5993 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5994 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5995 return MIN (c0, c1);
5997 case MULT_EXPR:
5998 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5999 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6000 return c0 * c1;
6002 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6004 if (integer_pow2p (TREE_OPERAND (exp, 1))
6005 && host_integerp (TREE_OPERAND (exp, 1), 1))
6007 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6008 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6009 return MAX (1, c0 / c1);
6013 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6014 case SAVE_EXPR:
6015 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6017 case COMPOUND_EXPR:
6018 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6020 case COND_EXPR:
6021 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6022 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6023 return MIN (c0, c1);
6025 default:
6026 break;
6027 }
6029 return 1;
6030 }
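/* A worked instance of the recursion above, for a hypothetical
   offset tree (i * 8) + 12: the MULT_EXPR contributes 1 * 8 = 8 (an
   unknown factor defaults to 1), the constant 12 contributes 4 (its
   lowest set bit), and PLUS_EXPR takes the MIN, giving 4: the sum is
   provably a multiple of 4 but not of 8.  */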
6032 /* Similar, except that the alignment requirements of TARGET are
6033 taken into account. Assume it is at least as aligned as its
6034 type, unless it is a COMPONENT_REF in which case the layout of
6035 the structure gives the alignment. */
6037 static unsigned HOST_WIDE_INT
6038 highest_pow2_factor_for_target (tree target, tree exp)
6040 unsigned HOST_WIDE_INT target_align, factor;
6042 factor = highest_pow2_factor (exp);
6043 if (TREE_CODE (target) == COMPONENT_REF)
6044 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6046 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6047 return MAX (factor, target_align);
6048 }
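/* For example, with a hypothetical EXP whose factor is only 2 but a
   TARGET whose type carries 8-byte alignment, MAX (2, 8) yields 8,
   so the reference may be treated as more aligned than EXP alone
   would justify.  */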
6050 /* Subroutine of expand_expr. Expand the two operands of a binary
6051 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6052 The value may be stored in TARGET if TARGET is nonzero. The
6053 MODIFIER argument is as documented by expand_expr. */
6056 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6057 enum expand_modifier modifier)
6059 if (! safe_from_p (target, exp1, 1))
6060 target = 0;
6061 if (operand_equal_p (exp0, exp1, 0))
6063 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6064 *op1 = copy_rtx (*op0);
6068 /* If we need to preserve evaluation order, copy exp0 into its own
6069 temporary variable so that it can't be clobbered by exp1. */
6070 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6071 exp0 = save_expr (exp0);
6072 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6073 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6074 }
6075 }
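/* An illustrative call of the helper above, as a binary-operator
   expander might issue it; EXP and SUBTARGET are assumed to come
   from the surrounding expand_expr logic.  */
#if 0
  rtx op0, op1;
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, EXPAND_NORMAL);
#endif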
6078 /* expand_expr: generate code for computing expression EXP.
6079 An rtx for the computed value is returned. The value is never null.
6080 In the case of a void EXP, const0_rtx is returned.
6082 The value may be stored in TARGET if TARGET is nonzero.
6083 TARGET is just a suggestion; callers must assume that
6084 the rtx returned may not be the same as TARGET.
6086 If TARGET is CONST0_RTX, it means that the value will be ignored.
6088 If TMODE is not VOIDmode, it suggests generating the
6089 result in mode TMODE. But this is done only when convenient.
6090 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6091 TMODE is just a suggestion; callers must assume that
6092 the rtx returned may not have mode TMODE.
6094 Note that TARGET may have neither TMODE nor MODE. In that case, it
6095 probably will not be used.
6097 If MODIFIER is EXPAND_SUM then when EXP is an addition
6098 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6099 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6100 products as above, or REG or MEM, or constant.
6101 Ordinarily in such cases we would output mul or add instructions
6102 and then return a pseudo reg containing the sum.
6104 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6105 it also marks a label as absolutely required (it can't be dead).
6106 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6107 This is used for outputting expressions used in initializers.
6109 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6110 with a constant address even if that address is not normally legitimate.
6111 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6113 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6114 a call parameter. Such targets require special care as we haven't yet
6115 marked TARGET so that it's safe from being trashed by libcalls. We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
6118 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6120 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6121 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6122 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
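/* Example of the modifier protocol (illustrative only, not from the
   original sources): expanding &arr[3] for a global array ARR of ints
   under EXPAND_SUM may legitimately return the unsimplified form

     (plus:SI (symbol_ref:SI ("arr")) (const_int 12))

   whereas EXPAND_NORMAL would be obliged to copy the sum into a pseudo
   register and return that register instead.  */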
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }
6149 mode = TYPE_MODE (type);
6150 /* Use subtarget as the target for operand 0 of a binary operation. */
6151 subtarget = get_subtarget (target);
6152 original_target = target;
6153 ignore = (target == const0_rtx
6154 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6155 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6156 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6157 && TREE_CODE (type) == VOID_TYPE));
6159 /* If we are going to ignore this result, we need only do something
6160 if there is a side-effect somewhere in the expression. If there
6161 is, short-circuit the most common cases here. Note that we must
6162 not call expand_expr with anything but const0_rtx in case this
6163 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;
6170 /* Ensure we reference a volatile object even if value is ignored, but
6171 don't do this if all we are doing is taking its address. */
6172 if (TREE_THIS_VOLATILE (exp)
6173 && TREE_CODE (exp) != FUNCTION_DECL
6174 && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}
6183 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6184 || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
6195 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6196 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
  /* If we will do cse, generate all results into pseudo registers
6213 since 1) that allows cse to find more things
6214 and 2) otherwise cse could produce an insn the machine
6215 cannot support. An exception is a CONSTRUCTOR into a multi-word
6216 MEM: that's much more likely to be most efficient into the MEM.
6217 Another is a CALL_EXPR which must return in memory. */
6219 if (! cse_not_expected && mode != BLKmode && target
6220 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6221 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
6229 tree function = decl_function_context (exp);
	/* Labels in containing functions, or labels used from initializers,
	   must be forced.  */
	if (modifier == EXPAND_INITIALIZER
	    || (function != current_function_decl
		&& function != inline_function_decl
		&& function != 0))
	  temp = force_label_rtx (exp);
	else
	  temp = label_rtx (exp);
6240 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6241 if (function != current_function_decl
6242 && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;

	return temp;
      }

    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
	{
	  error ("%Jprior parameter's size depends on '%D'", exp, exp);
	  return CONST0_RTX (mode);
	}
6254 /* ... fall through ... */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
6258 but the type is complete now, lay out the decl now. */
6259 if (DECL_SIZE (exp) == 0
6260 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6261 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6262 layout_decl (exp, 0);
6264 /* ... fall through ... */
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();
      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
6276 assemble_external (exp);
6277 TREE_USED (exp) = 1;
      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;
6283 /* Handle variables inherited from containing functions. */
6284 context = decl_function_context (exp);
6286 /* We treat inline_function_decl as an alias for the current function
6287 because that is the inline function whose vars, types, etc.
6288 are being merged into the current function.
6289 See expand_inline_function. */
6291 if (context != 0 && context != current_function_decl
6292 && context != inline_function_decl
6293 /* If var is static, we don't need a static chain to access it. */
6294 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6295 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  lang_hooks.mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}
6317 /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */
6321 else if (GET_CODE (DECL_RTL (exp)) == MEM
6322 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6323 temp = validize_mem (DECL_RTL (exp));
6325 /* If DECL_RTL is memory, we are in the normal case and either
6326 the address is not valid or it is not a register and -fforce-addr
6327 is specified, get the address into a register. */
6329 else if (GET_CODE (DECL_RTL (exp)) == MEM
6330 && modifier != EXPAND_CONST_ADDRESS
6331 && modifier != EXPAND_SUM
6332 && modifier != EXPAND_INITIALIZER
6333 && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	{
	  if (alt_rtl)
	    *alt_rtl = DECL_RTL (exp);
6340 temp = replace_equiv_address (DECL_RTL (exp),
6341 copy_rtx (XEXP (DECL_RTL (exp), 0)));
      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
6354 /* If the mode of DECL_RTL does not match that of the decl, it
6355 must be a promoted value. We return a SUBREG of the wanted mode,
6356 but mark it so that we know that it was already extended. */
6358 if (GET_CODE (DECL_RTL (exp)) == REG
6359 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6361 /* Get the signedness used for this variable. Ensure we get the
6362 same mode we got when the variable was declared. */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return DECL_RTL (exp);

    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);
6380 /* ??? If overflow is set, fold will have done an incomplete job,
6381 which can result in (plus xx (const_int 0)), which can get
6382 simplified by validate_replace_rtx during virtual register
6383 instantiation, which can result in unrecognizable insns.
6384 Avoid this by forcing all overflows into registers. */
6385 if (TREE_CONSTANT_OVERFLOW (exp)
6386 && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      return const_vector_from_tree (exp);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
6399 which will be turned into memory by reload if necessary.
6401 We used to force a register so that loop.c could see it. But
6402 this does not allow gen_* patterns to perform optimizations with
6403 the constants. It also produces two insns in cases like "x = 1.0;".
6404 On most machines, floating-point constants are not permitted in
6405 many insns, so we'd end up copying it to a register in any case.
6407 Now, we do the copying in expand_binop, if appropriate. */
6408 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6409 TYPE_MODE (TREE_TYPE (exp)));
    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
6413 if (original_target && GET_CODE (original_target) == CONCAT)
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;
6418 rtarg = XEXP (original_target, 0);
6419 itarg = XEXP (original_target, 1);
6421 /* Move the real and imaginary parts separately. */
6422 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6423 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}
6433 /* ... fall through ... */
    case STRING_CST:
      temp = output_constant_def (exp, 1);
6438 /* temp contains a constant address.
6439 On RISC machines where a constant address isn't valid,
6440 make some insns to get that address into a register. */
6441 if (modifier != EXPAND_CONST_ADDRESS
6442 && modifier != EXPAND_INITIALIZER
6443 && modifier != EXPAND_SUM
6444 && (! memory_address_p (mode, XEXP (temp, 0))
6445 || flag_force_addr))
6446 return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	struct file_stack fs;
6455 fs.location = input_location;
6456 fs.next = expr_wfl_stack;
6457 input_filename = EXPR_WFL_FILENAME (exp);
6458 input_line = EXPR_WFL_LINENO (exp);
6459 expr_wfl_stack = &fs;
6460 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6461 emit_line_note (input_location);
6462 /* Possibly avoid switching back and forth here. */
	to_return = expand_expr (EXPR_WFL_NODE (exp),
				 (ignore ? const0_rtx : target),
				 tmode, modifier);
	if (expr_wfl_stack != &fs)
	  abort ();
	input_location = fs.location;
	expr_wfl_stack = fs.next;
	return to_return;
      }
    case SAVE_EXPR:
      context = decl_function_context (exp);
6476 /* If this SAVE_EXPR was at global context, assume we are an
6477 initialization function and move it into our context. */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6481 /* We treat inline_function_decl as an alias for the current function
6482 because that is the inline function whose vars, types, etc.
6483 are being merged into the current function.
6484 See expand_inline_function. */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;
      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);
6495 temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp, /*rescan=*/true);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return
	    replace_equiv_address (temp,
				   fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (build_qualified_type (type,
						      (TYPE_QUALS (type)
						       | TYPE_QUAL_CONST)),
				3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);
6522 /* If the mode of TEMP does not match that of the expression, it
6523 must be a promoted value. We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
6529 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6530 promote_mode (type, mode, &unsignedp, 0);
6531 SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	    }
6535 if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	  TREE_USED (exp) = 1;
	}
6544 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6545 must be a promoted value. We return a SUBREG of the wanted mode,
6546 but mark it so that we know that it was already extended. */
6548 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
6551 /* Compute the signedness and make the proper SUBREG. */
6552 promote_mode (type, mode, &unsignedp, 0);
6553 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6554 SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);
    case UNSAVE_EXPR:
      {
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0)
	  = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;
    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;
6582 case LABELED_BLOCK_EXPR:
6583 if (LABELED_BLOCK_BODY (exp))
6584 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6585 /* Should perhaps use expand_label, but this is simpler and safer. */
6586 do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;
6590 case EXIT_BLOCK_EXPR:
6591 if (EXIT_BLOCK_RETURN (exp))
6592 sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;
    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
6609 /* Need to open a binding contour here because
6610 if there are any cleanups they must be contained here. */
6611 expand_start_bindings (2);
6613 /* Mark the corresponding BLOCK for output in its proper place. */
6614 if (TREE_OPERAND (exp, 2) != 0
6615 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6616 lang_hooks.decls.insert_block (TREE_OPERAND (exp, 2));
	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (!DECL_RTL_SET_P (vars))
	      expand_decl (vars);
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }
6627 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }
    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insn (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      if (alt_rtl)
	*alt_rtl = RTL_EXPR_ALT_RTL (exp);
      return RTL_EXPR_RTL (exp);
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}
6661 /* All elts simple constants => refer to a constant in memory. But
6662 if this is a non-BLKmode mode, let it store a field at a time
6663 since that should make a CONST_INT or CONST_DOUBLE when we
6664 fold. Likewise, if we have a target we can use, it is best to
6665 store directly into the target unless the type is large enough
6666 that memcpy will be used. If we are making an initializer and
6667 all operands are constant, put it in memory as well.
6669 FIXME: Avoid trying to fill vector constructors piece-meal.
6670 Output them with output_constant_def below unless we're sure
6671 they're zeros. This should go away when vector initializers
6672 are treated like VECTOR_CST instead of arrays.
6674 else if ((TREE_STATIC (exp)
6675 && ((mode == BLKmode
6676 && ! (target != 0 && safe_from_p (target, exp, 1)))
6677 || TREE_ADDRESSABLE (exp)
6678 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6679 && (! MOVE_BY_PIECES_P
			   (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			    TYPE_ALIGN (type)))
		       && ((TREE_CODE (type) == VECTOR_TYPE
6683 && !is_zeros_p (exp))
6684 || ! mostly_zeros_p (exp)))))
6685 || ((modifier == EXPAND_INITIALIZER
6686 || modifier == EXPAND_CONST_ADDRESS)
6687 && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);
6691 if (modifier != EXPAND_CONST_ADDRESS
6692 && modifier != EXPAND_INITIALIZER
6693 && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
6700 /* Handle calls that pass values in multiple non-contiguous
6701 locations. The Irix 6 ABI has examples of this. */
6702 if (target == 0 || ! safe_from_p (target, exp, 1)
6703 || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree index;
	tree string = string_constant (exp1, &index);
	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
6725 && TREE_CODE (index) == INTEGER_CST
6726 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6727 && GET_MODE_CLASS (mode) == MODE_INT
6728 && GET_MODE_SIZE (mode) == 1
6729 && modifier != EXPAND_WRITE)
6730 return gen_int_mode (TREE_STRING_POINTER (string)
6731 [TREE_INT_CST_LOW (index)], mode);
6733 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6734 op0 = memory_address (mode, op0);
6735 temp = gen_rtx_MEM (mode, op0);
6736 set_mem_attributes (temp, exp, 0);
6738 /* If we are writing to this object and its type is a record with
6739 readonly fields, we must mark it as readonly so it will
6740 conflict with readonly references to those fields. */
6741 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6742 RTX_UNCHANGING_P (temp) = 1;
6748 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6752 tree array = TREE_OPERAND (exp, 0);
6753 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6754 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6755 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6758 /* Optimize the special-case of a zero lower bound.
6760 We convert the low_bound to sizetype to avoid some problems
6761 with constant folding. (E.g. suppose the lower bound is 1,
6762 and its mode is QI. Without the conversion, (ARRAY
6763 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6764 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6766 if (! integer_zerop (low_bound))
6767 index = size_diffop (index, convert (sizetype, low_bound));
6769 /* Fold an expression like: "foo"[2].
6770 This is not done in fold so it won't happen inside &.
6771 Don't fold if this is for wide characters since it's too
6772 difficult to do correctly and this is a very rare case. */
6774 if (modifier != EXPAND_CONST_ADDRESS
6775 && modifier != EXPAND_INITIALIZER
6776 && modifier != EXPAND_MEMORY
6777 && TREE_CODE (array) == STRING_CST
6778 && TREE_CODE (index) == INTEGER_CST
6779 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6780 && GET_MODE_CLASS (mode) == MODE_INT
6781 && GET_MODE_SIZE (mode) == 1)
6782 return gen_int_mode (TREE_STRING_POINTER (array)
6783 [TREE_INT_CST_LOW (index)], mode);
6785 /* If this is a constant index into a constant array,
6786 just get the value from the array. Handle both the cases when
6787 we have an explicit constructor and when our operand is a variable
6788 that was declared const. */
6790 if (modifier != EXPAND_CONST_ADDRESS
6791 && modifier != EXPAND_INITIALIZER
6792 && modifier != EXPAND_MEMORY
6793 && TREE_CODE (array) == CONSTRUCTOR
6794 && ! TREE_SIDE_EFFECTS (array)
6795 && TREE_CODE (index) == INTEGER_CST
6796 && 0 > compare_tree_int (index,
6797 list_length (CONSTRUCTOR_ELTS
6798 (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;
	    unsigned HOST_WIDE_INT i;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }
6812 else if (optimize >= 1
6813 && modifier != EXPAND_CONST_ADDRESS
6814 && modifier != EXPAND_INITIALIZER
6815 && modifier != EXPAND_MEMORY
6816 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6817 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6818 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6819 && targetm.binds_local_p (array))
	{
	  if (TREE_CODE (index) == INTEGER_CST)
	    {
	      tree init = DECL_INITIAL (array);

	      if (TREE_CODE (init) == CONSTRUCTOR)
		{
		  tree elem;

		  for (elem = CONSTRUCTOR_ELTS (init);
		       (elem
			&& !tree_int_cst_equal (TREE_PURPOSE (elem), index));
		       elem = TREE_CHAIN (elem))
		    ;

		  if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		    return expand_expr (fold (TREE_VALUE (elem)), target,
					tmode, modifier);
		}
6839 else if (TREE_CODE (init) == STRING_CST
6840 && 0 > compare_tree_int (index,
						TREE_STRING_LENGTH (init)))
		{
		  tree type = TREE_TYPE (TREE_TYPE (init));
6844 enum machine_mode mode = TYPE_MODE (type);
6846 if (GET_MODE_CLASS (mode) == MODE_INT
6847 && GET_MODE_SIZE (mode) == 1)
		    return gen_int_mode (TREE_STRING_POINTER (init)
					 [TREE_INT_CST_LOW (index)], mode);
		}
	    }
	}

      goto normal_inner_ref;
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
6858 appropriate field if it is present. */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
6865 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6866 /* We can normally use the value of the field in the
6867 CONSTRUCTOR. However, if this is a bitfield in
6868 an integral mode that we can fit in a HOST_WIDE_INT,
6869 we must mask only the number of bits in the bitfield,
6870 since this is done implicitly by the constructor. If
6871 the bitfield does not meet either of those conditions,
6872 we can't do this optimization. */
6873 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		|| ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
		     == MODE_INT)
		    && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			<= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
6882 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
6885 HOST_WIDE_INT bitsize
6886 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6887 enum machine_mode imode
6888 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}

      goto normal_inner_ref;
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;
6925 /* If we got back the original object, something is wrong. Perhaps
6926 we are evaluating an expression too early. In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();
6931 /* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to suffice.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
6943 (modifier == EXPAND_INITIALIZER
6944 || modifier == EXPAND_CONST_ADDRESS
6945 || modifier == EXPAND_STACK_PARM)
6946 ? modifier : EXPAND_NORMAL);
6948 /* If this is a constant, put it into a register if it is a
6949 legitimate constant and OFFSET is 0 and memory if it isn't. */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }
6960 /* Otherwise, if this object not in memory and we either have an
6961 offset or a BLKmode result, put it there. This case can't occur in
6962 C, but can in Ada if we have unchecked conversion of an expression
6963 from a scalar type to an array or record type or for an
6964 ARRAY_RANGE_REF whose type is BLKmode. */
	else if (GET_CODE (op0) != MEM
		 && (offset != 0
		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
	  {
6969 /* If the operand is a SAVE_EXPR, we can deal with this by
6970 forcing the SAVE_EXPR into memory. */
	    if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
	      {
		put_var_into_stack (TREE_OPERAND (exp, 0),
				    /*rescan=*/true);
		op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
	      }
	    else
	      {
		tree nt
6980 = build_qualified_type (TREE_TYPE (tem),
6981 (TYPE_QUALS (TREE_TYPE (tem))
6982 | TYPE_QUAL_CONST));
6983 rtx memloc = assign_temp (nt, 1, 1, 1);
		emit_move_insn (memloc, op0);
		op0 = memloc;
	      }
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    if (GET_CODE (op0) != MEM)
	      abort ();
6998 #ifdef POINTERS_EXTEND_UNSIGNED
6999 if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
7006 if (GET_MODE (op0) == BLKmode
7007 /* A constant address in OP0 can have VOIDmode, we must
7008 not try to call force_reg in that case. */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }
7019 op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }
7023 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7024 record its alignment as BIGGEST_ALIGNMENT. */
7025 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7026 && is_aligning_offset (offset, tem))
7027 set_mem_align (op0, BIGGEST_ALIGNMENT);
7029 /* Don't forget about volatility even if this is a bitfield. */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
7038 /* The following code doesn't handle CONCAT.
7039 Assume only bitpos == 0 can be used for CONCAT, due to
7040 one element arrays having the same mode as its element. */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }
7048 /* In cases where an aligned union has an unaligned object
7049 as a field, we might be extracting a BLKmode value from
7050 an integer-mode (e.g., SImode) object. Handle this case
7051 by doing the extract into an object as wide as the field
7052 (which we know to be the width of a basic mode), then
7053 storing into memory, and changing the mode to BLKmode. */
7054 if (mode1 == VOIDmode
7055 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7056 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7057 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7058 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7059 && modifier != EXPAND_CONST_ADDRESS
7060 && modifier != EXPAND_INITIALIZER)
7061 /* If the field isn't aligned enough to fetch as a memref,
7062 fetch it as a bit field. */
7063 || (mode1 != BLKmode
7064 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7065 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7066 || (GET_CODE (op0) == MEM
7067 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7068 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7069 && ((modifier == EXPAND_CONST_ADDRESS
		      || modifier == EXPAND_INITIALIZER)
		     ? STRICT_ALIGNMENT
		     : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7073 || (bitpos % BITS_PER_UNIT != 0)))
7074 /* If the type and the field are a constant size and the
7075 size of the type isn't the same size as the bitfield,
7076 we must use bitfield operations. */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
7083 enum machine_mode ext_mode = mode;
7085 if (ext_mode == BLKmode
7086 && ! (target != 0 && GET_CODE (op0) == MEM
7087 && GET_CODE (target) == MEM
7088 && bitpos % BITS_PER_UNIT == 0))
7089 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		if (bitsize == 0)
		  return target;
7099 /* In this case, BITPOS must start at a byte boundary and
7100 TARGET, if specified, must be a MEM. */
7101 if (GET_CODE (op0) != MEM
7102 || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();
7106 emit_block_move (target,
7107 adjust_address (op0, VOIDmode,
7108 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);
7119 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7120 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7122 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7123 (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode,
				     int_size_in_bytes (TREE_TYPE (tem)));
7128 /* If the result is a record type and BITSIZE is narrower than
7129 the mode of OP0, an integral mode, and this is a big endian
7130 machine, we must put the field into the high-order bits. */
7131 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7132 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7133 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7134 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    /* If the result type is BLKmode, store the data into a temporary
7140 of the appropriate type, but with the mode corresponding to the
7141 mode for the data we have (op0's mode). It's tempting to make
7142 this a constant type, since we know it's only being stored once,
7143 but that can cause problems if we are taking the address of this
7144 COMPONENT_REF because the MEM of any reference via that address
7145 will have flags corresponding to the type, which will not
7146 necessarily be constant. */
	    if (mode == BLKmode)
	      {
		rtx new
		  = assign_stack_temp_for_type
		    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }
	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;
7167 /* Get a reference to just this component. */
7168 if (modifier == EXPAND_CONST_ADDRESS
7169 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7174 if (op0 == orig_op0)
7175 op0 = copy_rtx (op0);
7177 set_mem_attributes (op0, exp, 0);
7178 if (GET_CODE (XEXP (op0, 0)) == REG)
7179 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7181 MEM_VOLATILE_P (op0) |= volatilep;
7182 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7183 || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case VTABLE_REF:
      {
	rtx insn, before = get_last_insn (), vtbl_ref;
7197 /* Evaluate the interior expression. */
7198 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7201 /* Get or create an instruction off which to hang a note. */
	if (REG_P (subtarget))
	  {
	    target = subtarget;
	    insn = get_last_insn ();
	    if (insn == before)
	      abort ();
	    if (! INSN_P (insn))
	      insn = prev_nonnote_insn (insn);
	  }
	else
	  {
	    target = gen_reg_rtx (GET_MODE (subtarget));
	    insn = emit_move_insn (target, subtarget);
	  }
7217 /* Collect the data for the note. */
7218 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7219 vtbl_ref = plus_constant (vtbl_ref,
7220 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7221 /* Discard the initial CONST that was added. */
7222 vtbl_ref = XEXP (vtbl_ref, 0);
	REG_NOTES (insn)
	  = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));

	return target;
      }
7230 /* Intended for a reference to a buffer of a file-object in Pascal.
7231 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
7242 the_word = set [ (index - rlo)/bits_per_word ];
7243 bit_index = index % bits_per_word;
7244 bitmask = 1 << bit_index;
7245 return !!(the_word & bitmask); */
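	/* Worked instance of the algorithm above (illustrative only):
	   with set_low == 3, index == 10 and bits_per_word == 8,
	   rlo = 3 - (3 % 8) = 0, the_word = set[(10 - 0) / 8] = set[1],
	   bit_index = 10 % 8 = 2 and bitmask = 1 << 2 = 4, so the result
	   tests bit 2 of the second byte of the set.  */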
7247 tree set = TREE_OPERAND (exp, 0);
7248 tree index = TREE_OPERAND (exp, 1);
7249 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7250 tree set_type = TREE_TYPE (set);
7251 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7252 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7253 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7254 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7255 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7256 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7257 rtx setaddr = XEXP (setval, 0);
7258 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;
7262 /* If domain is empty, answer is no. Likewise if index is constant
7263 and out of bounds. */
7264 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7265 && TREE_CODE (set_low_bound) == INTEGER_CST
7266 && tree_int_cst_lt (set_high_bound, set_low_bound))
7267 || (TREE_CODE (index) == INTEGER_CST
7268 && TREE_CODE (set_low_bound) == INTEGER_CST
7269 && tree_int_cst_lt (index, set_low_bound))
7270 || (TREE_CODE (set_high_bound) == INTEGER_CST
7271 && TREE_CODE (index) == INTEGER_CST
		&& tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7278 /* If we get here, we have to generate the code for both cases
7279 (in range and out of range). */
7281 op0 = gen_label_rtx ();
7282 op1 = gen_label_rtx ();
7284 if (! (GET_CODE (index_val) == CONST_INT
7285 && GET_CODE (lo_r) == CONST_INT))
7286 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7287 GET_MODE (index_val), iunsignedp, op1);
7289 if (! (GET_CODE (index_val) == CONST_INT
7290 && GET_CODE (hi_r) == CONST_INT))
7291 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7292 GET_MODE (index_val), iunsignedp, op1);
	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
7297 rlow = GEN_INT (INTVAL (lo_r)
7298 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
7301 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7302 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7304 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7305 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7307 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7308 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7309 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7310 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7312 addr = memory_address (byte_mode,
7313 expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));
7317 /* Extract the bit we want to examine. */
7318 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7319 gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
7322 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7323 GET_MODE (target) == byte_mode ? target : 0,
7324 1, OPTAB_LIB_WIDEN);
7326 if (result != target)
7327 convert_move (target, result, 1);
	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }
7337 case WITH_CLEANUP_EXPR:
      if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
	{
	  WITH_CLEANUP_EXPR_RTL (exp)
7341 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7342 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7343 CLEANUP_EH_ONLY (exp));
7345 /* That's it for this cleanup. */
	  TREE_OPERAND (exp, 1) = 0;
	}
      return WITH_CLEANUP_EXPR_RTL (exp);
    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
7353 actions to be performed. */
7354 expand_start_bindings (2);
7356 target_temp_slot_level = temp_slot_level;
7358 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7359 /* If we're going to use this value, load it up now. */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
7369 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
7374 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7375 == BUILT_IN_FRONTEND)
7376 /* ??? Use (*fun) form because expand_expr is a macro. */
	    return (*lang_hooks.expand_expr) (exp, original_target,
					      tmode, modifier, alt_rtl);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;
      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7397 /* If both input and output are BLKmode, this conversion isn't doing
7398 anything except possibly changing memory attribute. */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }
7417 if (GET_CODE (target) == MEM)
7418 /* Store data into beginning of memory target. */
7419 store_expr (TREE_OPERAND (exp, 0),
7420 adjust_address (target, TYPE_MODE (valtype), 0),
7421 modifier == EXPAND_STACK_PARM ? 2 : 0);
7423 else if (GET_CODE (target) == REG)
7424 /* Store this field into a union of the proper type. */
7425 store_field (target,
7426 MIN ((int_size_in_bytes (TREE_TYPE
7427 (TREE_OPERAND (exp, 0)))
7429 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7430 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7431 VOIDmode, 0, type, 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}
7454 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      if (GET_MODE (op0) == mode)
	return op0;
7458 /* If OP0 is a constant, just convert it into the proper mode. */
7459 if (CONSTANT_P (op0))
7461 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7462 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7464 if (modifier == EXPAND_INITIALIZER)
7465 return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TREE_UNSIGNED (inner_type));
	}
7473 if (modifier == EXPAND_INITIALIZER)
7474 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
7485 case VIEW_CONVERT_EXPR:
7486 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7488 /* If the input and output modes are both the same, we are done.
7489 Otherwise, if neither mode is BLKmode and both are integral and within
7490 a word, we can use gen_lowpart. If neither is true, make sure the
7491 operand is in memory and convert the MEM to the new mode. */
7492 if (TYPE_MODE (type) == GET_MODE (op0))
7494 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7495 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7496 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7497 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7498 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7499 op0 = gen_lowpart (TYPE_MODE (type), op0);
7500 else if (GET_CODE (op0) != MEM)
7502 /* If the operand is not a MEM, force it into memory. Since we
	     are going to be changing the mode of the MEM, don't call
7504 force_const_mem for constants because we don't allow pool
7505 constants to change mode. */
7506 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}
7521 /* At this point, OP0 is in the correct mode. If the output type is such
7522 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (GET_CODE (op0) == MEM)
	{
7527 op0 = copy_rtx (op0);
7529 if (TYPE_ALIGN_OK (type))
7530 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7531 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7532 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7534 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7535 HOST_WIDE_INT temp_size
7536 = MAX (int_size_in_bytes (inner_type),
7537 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7538 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7539 temp_size, 0, type);
7540 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
	  if (TREE_ADDRESSABLE (exp))
	    abort ();
7545 if (GET_MODE (op0) == BLKmode)
7546 emit_block_move (new_with_op0_mode, op0,
7547 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7548 (modifier == EXPAND_STACK_PARM
7549 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    emit_move_insn (new_with_op0_mode, op0);

	  op0 = new;
	}
      op0 = adjust_address (op0, TYPE_MODE (type), 0);

      return op0;

    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
7563 && (GET_MODE_CLASS (mode) == MODE_INT)
7564 ? addv_optab : add_optab;
7566 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7567 something else, make sure we add the register to the constant and
7568 then to the other thing. This case can occur during strength
7569 reduction and doing it this way will produce better code if the
7570 frame pointer or argument pointer is eliminated.
7572 fold-const.c will ensure that the constant is always in the inner
7573 PLUS_EXPR, so the only case we need to do anything about is if
7574 sp, ap, or fp is our second argument, in which case we must swap
7575 the innermost first argument and our second argument. */
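      /* For instance (illustrative only): given the tree
	 (PLUS_EXPR (PLUS_EXPR x 4) fp), where fp is an RTL_EXPR for the
	 frame pointer, the swap below turns it into
	 (PLUS_EXPR (PLUS_EXPR fp 4) x), so that fp+4 can fold into a
	 single address once the frame pointer is eliminated.  */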
7577 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7578 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7579 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7580 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7581 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7582 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}
7590 /* If the result is to be ptr_mode and we are adding an integer to
7591 something, we might be forming a constant. So try to use
7592 plus_constant. If it produces a sum and we can't accept it,
7593 use force_operand. This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
7598 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7599 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
7603 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7604 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
7611 /* Use immed_double_const to ensure that the constant is
7612 truncated according to the mode of OP1, then sign extended
7613 to a HOST_WIDE_INT. Using the constant directly can result
7614 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7619 op1 = plus_constant (op1, INTVAL (constant_part));
7620 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
7625 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7626 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7627 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7631 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7632 (modifier == EXPAND_INITIALIZER
7633 ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
7638 /* Return a PLUS if modifier says it's OK. */
7639 if (modifier == EXPAND_SUM
7640 || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
7644 /* Use immed_double_const to ensure that the constant is
7645 truncated according to the mode of OP1, then sign extended
7646 to a HOST_WIDE_INT. Using the constant directly can result
7647 in non-canonical RTL in a 64x32 cross compile. */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7652 op0 = plus_constant (op0, INTVAL (constant_part));
7653 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
7659 /* No sense saving up arithmetic to be done
7660 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}
7675 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7676 subtarget, &op0, &op1, modifier);
7677 return simplify_gen_binary (PLUS, mode, op0, op1);
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
7685 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7686 && really_constant_p (TREE_OPERAND (exp, 0))
7687 && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);
7692 /* If the last operand is a CONST_INT, use plus_constant of
7693 the negated constant. Else make the MINUS. */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
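      /* E.g. (illustrative only): for a static initializer computing
	 "&b[4] - (char *) &a" with 4-byte ints, this path can yield

	   (minus:SI (plus:SI (symbol_ref "b") (const_int 16))
		     (symbol_ref "a"))

	 which the assembler and linker resolve, so no instructions are
	 emitted at all.  */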
7700 this_optab = ! unsignedp && flag_trapv
7701 && (GET_MODE_CLASS(mode) == MODE_INT)
7702 ? subv_optab : sub_optab;
7704 /* No sense saving up arithmetic to be done
7705 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;
7712 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7713 subtarget, &op0, &op1, modifier);
7715 /* Convert A - const to A + (-const). */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op1);
	}

      goto binop2;

    case MULT_EXPR:
      /* If first operand is constant, swap them.
7726 Thus the following special case checks need only
7727 check the second operand. */
7728 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7730 tree t1 = TREE_OPERAND (exp, 0);
7731 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7732 TREE_OPERAND (exp, 1) = t1;
7735 /* Attempt to return something suitable for generating an
7736 indexed address, for machines that support that. */
7738 if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);
7746 if (GET_CODE (op0) != REG)
7747 op0 = force_operand (op0, NULL_RTX);
7748 if (GET_CODE (op0) != REG)
7749 op0 = copy_to_mode_reg (mode, op0);
	  return gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1))));
	}
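      /* Illustrative result (not from the original sources): expanding
	 i * 4 under EXPAND_SUM with ptr_mode == SImode can return

	   (mult:SI (reg:SI i) (const_int 4))

	 unsimplified, so an enclosing PLUS_EXPR for &arr[i] can combine
	 it into a single indexed address.  */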
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
7759 /* Check for multiplying things that have been extended
7760 from a narrower type. If this machine supports multiplying
7761 in that narrower type with a result in the desired type,
7762 do it that way, and avoid the explicit type-conversion. */
7763 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7764 && TREE_CODE (type) == INTEGER_TYPE
7765 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7766 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7767 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7768 && int_fits_type_p (TREE_OPERAND (exp, 1),
7769 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7770 /* Don't use a widening multiply if a shift will do. */
7771 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7772 > HOST_BITS_PER_WIDE_INT)
7773 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7775 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7776 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7778 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7779 /* If both operands are extended, they must either both
7780 be zero-extended or both be sign-extended. */
7781 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7783 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7785 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7786 enum machine_mode innermode = TYPE_MODE (op0type);
7787 bool zextend_p = TREE_UNSIGNED (op0type);
7788 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7789 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7791 if (mode == GET_MODE_WIDER_MODE (innermode))
7793 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7795 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7796 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7797 TREE_OPERAND (exp, 1),
7798 NULL_RTX, &op0, &op1, 0);
7800 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7801 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7802 NULL_RTX, &op0, &op1, 0);
7805 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7806 && innermode == word_mode)
7809 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7810 NULL_RTX, VOIDmode, 0);
7811 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7812 op1 = convert_modes (innermode, mode,
7813 expand_expr (TREE_OPERAND (exp, 1),
7814 NULL_RTX, VOIDmode, 0),
7817 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7818 NULL_RTX, VOIDmode, 0);
7819 temp = expand_binop (mode, other_optab, op0, op1, target,
7820 unsignedp, OPTAB_LIB_WIDEN);
7821 hipart = gen_highpart (innermode, temp);
7822 htem = expand_mult_highpart_adjust (innermode, hipart,
7826 emit_move_insn (hipart, htem);
7831 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7832 subtarget, &op0, &op1, 0);
7833 return expand_mult (mode, op0, op1, target, unsignedp);
7835 case TRUNC_DIV_EXPR:
7836 case FLOOR_DIV_EXPR:
7838 case ROUND_DIV_EXPR:
7839 case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then, if the divisor is constant, we can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
7845 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7846 subtarget, &op0, &op1, 0);
7847 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage CSE the reciprocal saving
	 expensive divide.  If not, combine will rebuild the original
	 computation.  */
7853 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7854 && TREE_CODE (type) == REAL_TYPE
7855 && !real_onep (TREE_OPERAND (exp, 0)))
7856 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7857 build (RDIV_EXPR, type,
7858 build_real (type, dconst1),
7859 TREE_OPERAND (exp, 1))),
7860 target, tmode, modifier);
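      /* Illustrative effect of the rewrite above: with
	 -funsafe-math-optimizations, x/y and z/y in one block become
	 x*(1/y) and z*(1/y), and CSE can then compute the reciprocal
	 1/y once, replacing two expensive divides with one divide (or
	 reciprocal) and two multiplies.  */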
      this_optab = sdiv_optab;
      goto binop;
7864 case TRUNC_MOD_EXPR:
7865 case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
7870 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7871 subtarget, &op0, &op1, 0);
7872 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7874 case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */
7879 case FIX_TRUNC_EXPR:
7880 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7881 if (target == 0 || modifier == EXPAND_STACK_PARM)
7882 target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
7887 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7888 if (target == 0 || modifier == EXPAND_STACK_PARM)
7889 target = gen_reg_rtx (mode);
7890 /* expand_float can't figure out what to do if FROM has VOIDmode.
7891 So give it the correct mode. With -O, cse will optimize this. */
7892 if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
7900 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
7903 temp = expand_unop (mode,
7904 ! unsignedp && flag_trapv
7905 && (GET_MODE_CLASS(mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
7916 /* ABS_EXPR is not valid for complex arguments. */
7917 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	abort ();
7921 /* Unsigned abs is simply the operand. Testing here means we don't
7922 risk generating incorrect code below. */
      if (TREE_UNSIGNED (type))
	return op0;
7926 return expand_abs (mode, op0, target, unsignedp,
7927 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
7934 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7935 || GET_MODE (target) != mode
7936 || (GET_CODE (target) == REG
7937 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7938 target = gen_reg_rtx (mode);
7939 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7940 target, &op0, &op1, 0);
7942 /* First try to do it with a special MIN or MAX instruction.
7943 If that does not win, use a conditional jump to select the proper
7944 value. */
7945 this_optab = (unsignedp
7946 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7947 : (code == MIN_EXPR ? smin_optab : smax_optab));
7949 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7950 OPTAB_WIDEN);
7951 if (temp != 0)
7952 return temp;
7954 /* At this point, a MEM target is no longer useful; we will get better
7955 code without it. */
7957 if (GET_CODE (target) == MEM)
7958 target = gen_reg_rtx (mode);
7960 /* If op1 was placed in target, swap op0 and op1. */
7961 if (target != op0 && target == op1)
7962 {
7963 rtx tem = op0;
7964 op0 = op1;
7965 op1 = tem;
7966 }
7968 if (target != op0)
7969 emit_move_insn (target, op0);
7971 op0 = gen_label_rtx ();
7973 /* If this mode is an integer too wide to compare properly,
7974 compare word by word. Rely on cse to optimize constant cases. */
7975 if (GET_MODE_CLASS (mode) == MODE_INT
7976 && ! can_compare_p (GE, mode, ccp_jump))
7978 if (code == MAX_EXPR)
7979 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7980 NULL_RTX, op0);
7981 else
7982 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7983 NULL_RTX, op0);
7984 }
7985 else
7987 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7988 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7990 emit_move_insn (target, op1);
7991 emit_label (op0);
7992 return target;
7994 case BIT_NOT_EXPR:
7995 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7996 if (modifier == EXPAND_STACK_PARM)
7997 target = 0;
7998 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7999 if (temp == 0)
8000 abort ();
8001 return temp;
8003 /* ??? Can optimize bitwise operations with one arg constant.
8004 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8005 and (a bitwise1 b) bitwise2 b (etc)
8006 but that is probably not worthwhile. */
8008 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8009 boolean values when we want in all cases to compute both of them. In
8010 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8011 as actual zero-or-1 values and then bitwise anding. In cases where
8012 there cannot be any side effects, better code would be made by
8013 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8014 how to recognize those cases. */
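/* Editorial illustration: "a && b" (TRUTH_ANDIF_EXPR) must not evaluate
   b when a is false, so it needs a branch; TRUTH_AND_EXPR computes both
   operands as 0-or-1 values and combines them with a plain bitwise AND,
   roughly
       t1 = (a != 0); t2 = (b != 0); result = t1 & t2;  */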
8016 case TRUTH_AND_EXPR:
8017 case BIT_AND_EXPR:
8018 this_optab = and_optab;
8019 goto binop;
8021 case TRUTH_OR_EXPR:
8022 case BIT_IOR_EXPR:
8023 this_optab = ior_optab;
8024 goto binop;
8026 case TRUTH_XOR_EXPR:
8027 case BIT_XOR_EXPR:
8028 this_optab = xor_optab;
8029 goto binop;
8031 case LSHIFT_EXPR:
8032 case RSHIFT_EXPR:
8033 case LROTATE_EXPR:
8034 case RROTATE_EXPR:
8035 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8036 subtarget = 0;
8037 if (modifier == EXPAND_STACK_PARM)
8038 target = 0;
8039 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8040 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8041 unsignedp);
8043 /* Could determine the answer when only additive constants differ. Also,
8044 the addition of one can be handled by changing the condition. */
8045 case LT_EXPR:
8046 case LE_EXPR:
8047 case GT_EXPR:
8048 case GE_EXPR:
8049 case EQ_EXPR:
8050 case NE_EXPR:
8051 case UNORDERED_EXPR:
8052 case ORDERED_EXPR:
8053 case UNLT_EXPR:
8054 case UNLE_EXPR:
8055 case UNGT_EXPR:
8056 case UNGE_EXPR:
8057 case UNEQ_EXPR:
8058 temp = do_store_flag (exp,
8059 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8060 tmode != VOIDmode ? tmode : mode, 0);
8061 if (temp != 0)
8062 return temp;
8064 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8065 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8066 && original_target
8067 && GET_CODE (original_target) == REG
8068 && (GET_MODE (original_target)
8069 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8071 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8072 VOIDmode, 0);
8074 /* If temp is constant, we can just compute the result. */
8075 if (GET_CODE (temp) == CONST_INT)
8077 if (INTVAL (temp) != 0)
8078 emit_move_insn (target, const1_rtx);
8079 else
8080 emit_move_insn (target, const0_rtx);
8081 return target;
8082 }
8085 if (temp != original_target)
8086 {
8087 enum machine_mode mode1 = GET_MODE (temp);
8088 if (mode1 == VOIDmode)
8089 mode1 = tmode != VOIDmode ? tmode : mode;
8091 temp = copy_to_mode_reg (mode1, temp);
8092 }
8094 op1 = gen_label_rtx ();
8095 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8096 GET_MODE (temp), unsignedp, op1);
8097 emit_move_insn (temp, const1_rtx);
8098 emit_label (op1);
8099 return temp;
8100 }
8102 /* If no set-flag instruction, must generate a conditional
8103 store into a temporary variable. Drop through
8104 and handle this like && and ||. */
8106 case TRUTH_ANDIF_EXPR:
8107 case TRUTH_ORIF_EXPR:
8108 if (! ignore
8109 && (target == 0
8110 || modifier == EXPAND_STACK_PARM
8111 || ! safe_from_p (target, exp, 1)
8112 /* Make sure we don't have a hard reg (such as function's return
8113 value) live across basic blocks, if not optimizing. */
8114 || (!optimize && GET_CODE (target) == REG
8115 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8116 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8118 if (target)
8119 emit_clr_insn (target);
8121 op1 = gen_label_rtx ();
8122 jumpifnot (exp, op1);
8124 if (target)
8125 emit_0_to_1_insn (target);
8127 emit_label (op1);
8128 return ignore ? const0_rtx : target;
8130 case TRUTH_NOT_EXPR:
8131 if (modifier == EXPAND_STACK_PARM)
8132 target = 0;
8133 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8134 /* The parser is careful to generate TRUTH_NOT_EXPR
8135 only with operands that are always zero or one. */
8136 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8137 target, 1, OPTAB_LIB_WIDEN);
8138 if (temp == 0)
8139 abort ();
8140 return temp;
8142 case COMPOUND_EXPR:
8143 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8144 emit_queue ();
8145 return expand_expr_real (TREE_OPERAND (exp, 1),
8146 (ignore ? const0_rtx : target),
8147 VOIDmode, modifier, alt_rtl);
8149 case COND_EXPR:
8150 /* If we would have a "singleton" (see below) were it not for a
8151 conversion in each arm, bring that conversion back out. */
8152 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8153 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8154 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8155 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8157 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8158 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8160 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8161 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8162 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8163 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8164 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8165 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8166 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8167 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8168 return expand_expr (build1 (NOP_EXPR, type,
8169 build (COND_EXPR, TREE_TYPE (iftrue),
8170 TREE_OPERAND (exp, 0),
8171 iftrue, iffalse)),
8172 target, tmode, modifier);
8176 /* Note that COND_EXPRs whose type is a structure or union
8177 are required to be constructed to contain assignments of
8178 a temporary variable, so that we can evaluate them here
8179 for side effect only. If type is void, we must do likewise. */
8181 /* If an arm of the branch requires a cleanup,
8182 only that cleanup is performed. */
8183 {
8184 tree singleton = 0;
8185 tree binary_op = 0, unary_op = 0;
8187 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8188 convert it to our mode, if necessary. */
8189 if (integer_onep (TREE_OPERAND (exp, 1))
8190 && integer_zerop (TREE_OPERAND (exp, 2))
8191 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8192 {
8193 if (ignore)
8194 {
8195 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8196 modifier);
8197 return const0_rtx;
8198 }
8200 if (modifier == EXPAND_STACK_PARM)
8201 target = 0;
8202 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8203 if (GET_MODE (op0) == mode)
8204 return op0;
8206 if (target == 0)
8207 target = gen_reg_rtx (mode);
8208 convert_move (target, op0, unsignedp);
8209 return target;
8210 }
8212 /* Check for X ? A + B : A. If we have this, we can copy A to the
8213 output and conditionally add B. Similarly for unary operations.
8214 Don't do this if X has side-effects because those side effects
8215 might affect A or B and the "?" operation is a sequence point in
8216 ANSI. (operand_equal_p tests for side effects.) */
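/* Editorial illustration: for "x ? a + b : a" the singleton a is stored
   to the result unconditionally and b is added only on one path, roughly
       result = a; if (x) result = result + b;
   which is safe only because x has no side effects. */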
8218 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8219 && operand_equal_p (TREE_OPERAND (exp, 2),
8220 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8221 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8222 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8223 && operand_equal_p (TREE_OPERAND (exp, 1),
8224 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8225 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8226 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8227 && operand_equal_p (TREE_OPERAND (exp, 2),
8228 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8229 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8230 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8231 && operand_equal_p (TREE_OPERAND (exp, 1),
8232 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8233 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8235 /* If we are not to produce a result, we have no target. Otherwise,
8236 if a target was specified use it; it will not be used as an
8237 intermediate target unless it is safe. If no target, use a
8238 temporary. */
8240 if (ignore)
8241 temp = 0;
8242 else if (modifier == EXPAND_STACK_PARM)
8243 temp = assign_temp (type, 0, 0, 1);
8244 else if (original_target
8245 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8246 || (singleton && GET_CODE (original_target) == REG
8247 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8248 && original_target == var_rtx (singleton)))
8249 && GET_MODE (original_target) == mode
8250 #ifdef HAVE_conditional_move
8251 && (! can_conditionally_move_p (mode)
8252 || GET_CODE (original_target) == REG
8253 || TREE_ADDRESSABLE (type))
8254 #endif
8255 && (GET_CODE (original_target) != MEM
8256 || TREE_ADDRESSABLE (type)))
8257 temp = original_target;
8258 else if (TREE_ADDRESSABLE (type))
8259 abort ();
8260 else
8261 temp = assign_temp (type, 0, 0, 1);
8263 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8264 do the test of X as a store-flag operation, do this as
8265 A + ((X != 0) << log C). Similarly for other simple binary
8266 operators. Only do for C == 1 if BRANCH_COST is low. */
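/* Editorial illustration: with C == 4 == 1 << 2, the rewrite described
   above turns
       x ? a + 4 : a
   into branch-free code along the lines of
       a + ((x != 0) << 2).  */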
8267 if (temp && singleton && binary_op
8268 && (TREE_CODE (binary_op) == PLUS_EXPR
8269 || TREE_CODE (binary_op) == MINUS_EXPR
8270 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8271 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8272 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8273 : integer_onep (TREE_OPERAND (binary_op, 1)))
8274 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8275 {
8276 rtx result;
8277 tree cond;
8278 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8279 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8280 ? addv_optab : add_optab)
8281 : TREE_CODE (binary_op) == MINUS_EXPR
8282 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8283 ? subv_optab : sub_optab)
8284 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8285 : xor_optab);
8287 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8288 if (singleton == TREE_OPERAND (exp, 1))
8289 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8290 else
8291 cond = TREE_OPERAND (exp, 0);
8293 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8294 ? temp : NULL_RTX),
8295 mode, BRANCH_COST <= 1);
8297 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8298 result = expand_shift (LSHIFT_EXPR, mode, result,
8299 build_int_2 (tree_log2
8300 (TREE_OPERAND
8301 (binary_op, 1)),
8302 0),
8303 (safe_from_p (temp, singleton, 1)
8304 ? temp : NULL_RTX), 0);
8306 if (result)
8307 {
8308 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8309 return expand_binop (mode, boptab, op1, result, temp,
8310 unsignedp, OPTAB_LIB_WIDEN);
8311 }
8312 }
8314 do_pending_stack_adjust ();
8315 NO_DEFER_POP;
8316 op0 = gen_label_rtx ();
8318 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8322 /* If the target conflicts with the other operand of the
8323 binary op, we can't use it. Also, we can't use the target
8324 if it is a hard register, because evaluating the condition
8325 might clobber it. */
8326 if ((binary_op
8327 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8328 || (GET_CODE (temp) == REG
8329 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8330 temp = gen_reg_rtx (mode);
8331 store_expr (singleton, temp,
8332 modifier == EXPAND_STACK_PARM ? 2 : 0);
8333 }
8334 else
8335 expand_expr (singleton,
8336 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8337 if (singleton == TREE_OPERAND (exp, 1))
8338 jumpif (TREE_OPERAND (exp, 0), op0);
8339 else
8340 jumpifnot (TREE_OPERAND (exp, 0), op0);
8342 start_cleanup_deferral ();
8343 if (binary_op && temp == 0)
8344 /* Just touch the other operand. */
8345 expand_expr (TREE_OPERAND (binary_op, 1),
8346 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8347 else if (binary_op)
8348 store_expr (build (TREE_CODE (binary_op), type,
8349 make_tree (type, temp),
8350 TREE_OPERAND (binary_op, 1)),
8351 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8352 else
8353 store_expr (build1 (TREE_CODE (unary_op), type,
8354 make_tree (type, temp)),
8355 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8356 op1 = op0;
8357 }
8358 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8359 comparison operator. If we have one of these cases, set the
8360 output to A, branch on A (cse will merge these two references),
8361 then set the output to FOO. */
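/* Editorial illustration: for "i > 0 ? i : 0" the output is first set
   to i, the branch tests that same value (cse merges the two references
   to i), and only the other path stores 0. */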
8362 else if (temp
8363 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8364 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8365 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8366 TREE_OPERAND (exp, 1), 0)
8367 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8368 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8369 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8370 {
8371 if (GET_CODE (temp) == REG
8372 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8373 temp = gen_reg_rtx (mode);
8374 store_expr (TREE_OPERAND (exp, 1), temp,
8375 modifier == EXPAND_STACK_PARM ? 2 : 0);
8376 jumpif (TREE_OPERAND (exp, 0), op0);
8378 start_cleanup_deferral ();
8379 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8380 store_expr (TREE_OPERAND (exp, 2), temp,
8381 modifier == EXPAND_STACK_PARM ? 2 : 0);
8382 else
8383 expand_expr (TREE_OPERAND (exp, 2),
8384 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8385 }
8387 else if (temp
8388 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8389 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8390 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8391 TREE_OPERAND (exp, 2), 0)
8392 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8393 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8394 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8395 {
8396 if (GET_CODE (temp) == REG
8397 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8398 temp = gen_reg_rtx (mode);
8399 store_expr (TREE_OPERAND (exp, 2), temp,
8400 modifier == EXPAND_STACK_PARM ? 2 : 0);
8401 jumpifnot (TREE_OPERAND (exp, 0), op0);
8403 start_cleanup_deferral ();
8404 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8405 store_expr (TREE_OPERAND (exp, 1), temp,
8406 modifier == EXPAND_STACK_PARM ? 2 : 0);
8407 else
8408 expand_expr (TREE_OPERAND (exp, 1),
8409 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8411 }
8412 else
8413 {
8414 op1 = gen_label_rtx ();
8415 jumpifnot (TREE_OPERAND (exp, 0), op0);
8417 start_cleanup_deferral ();
8419 /* One branch of the cond can be void, if it never returns. For
8420 example, A ? throw : E. */
8421 if (temp != 0
8422 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8423 store_expr (TREE_OPERAND (exp, 1), temp,
8424 modifier == EXPAND_STACK_PARM ? 2 : 0);
8425 else
8426 expand_expr (TREE_OPERAND (exp, 1),
8427 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8428 end_cleanup_deferral ();
8430 emit_jump_insn (gen_jump (op1));
8431 emit_barrier ();
8432 emit_label (op0);
8433 start_cleanup_deferral ();
8434 if (temp != 0
8435 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8436 store_expr (TREE_OPERAND (exp, 2), temp,
8437 modifier == EXPAND_STACK_PARM ? 2 : 0);
8438 else
8439 expand_expr (TREE_OPERAND (exp, 2),
8440 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8443 end_cleanup_deferral ();
8444 }
8445 emit_queue ();
8446 emit_label (op1);
8447 OK_DEFER_POP;
8448 return temp;
8449 }
8451 case TARGET_EXPR:
8452 {
8454 /* Something needs to be initialized, but we didn't know
8455 where that thing was when building the tree. For example,
8456 it could be the return value of a function, or a parameter
8457 to a function which is laid down in the stack, or a temporary
8458 variable which must be passed by reference.
8460 We guarantee that the expression will either be constructed
8461 or copied into our original target. */
8463 tree slot = TREE_OPERAND (exp, 0);
8464 tree cleanups = NULL_TREE;
8467 if (TREE_CODE (slot) != VAR_DECL)
8468 abort ();
8470 if (! ignore)
8471 target = original_target;
8473 /* Set this here so that if we get a target that refers to a
8474 register variable that's already been used, put_reg_into_stack
8475 knows that it should fix up those uses. */
8476 TREE_USED (slot) = 1;
8480 if (DECL_RTL_SET_P (slot))
8481 {
8482 target = DECL_RTL (slot);
8483 /* If we have already expanded the slot, don't do
8484 anything else. */
8485 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8486 return target;
8487 }
8488 else
8489 {
8490 target = assign_temp (type, 2, 0, 1);
8491 /* All temp slots at this level must not conflict. */
8492 preserve_temp_slots (target);
8493 SET_DECL_RTL (slot, target);
8494 if (TREE_ADDRESSABLE (slot))
8495 put_var_into_stack (slot, /*rescan=*/false);
8497 /* Since SLOT is not known to the called function
8498 to belong to its stack frame, we must build an explicit
8499 cleanup. This case occurs when we must build up a reference
8500 to pass the reference as an argument. In this case,
8501 it is very likely that such a reference need not be
8502 built here. */
8504 if (TREE_OPERAND (exp, 2) == 0)
8505 TREE_OPERAND (exp, 2)
8506 = lang_hooks.maybe_build_cleanup (slot);
8507 cleanups = TREE_OPERAND (exp, 2);
8508 }
8509 }
8510 else
8511 {
8512 /* This case does occur, when expanding a parameter which
8513 needs to be constructed on the stack. The target
8514 is the actual stack address that we want to initialize.
8515 The function we call will perform the cleanup in this case. */
8517 /* If we have already assigned it space, use that space,
8518 not the target that we were passed in, as our target
8519 parameter is only a hint. */
8520 if (DECL_RTL_SET_P (slot))
8521 {
8522 target = DECL_RTL (slot);
8523 /* If we have already expanded the slot, don't do
8524 anything else. */
8525 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8526 return target;
8527 }
8528 else
8530 SET_DECL_RTL (slot, target);
8531 /* If we must have an addressable slot, then make sure that
8532 the RTL that we just stored in slot is OK. */
8533 if (TREE_ADDRESSABLE (slot))
8534 put_var_into_stack (slot, /*rescan=*/true);
8536 }
8538 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8539 /* Mark it as expanded. */
8540 TREE_OPERAND (exp, 1) = NULL_TREE;
8542 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8543 if (cleanups)
8544 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8546 return target;
8547 }
8549 case INIT_EXPR:
8550 {
8551 tree lhs = TREE_OPERAND (exp, 0);
8552 tree rhs = TREE_OPERAND (exp, 1);
8554 temp = expand_assignment (lhs, rhs, ! ignore);
8555 return temp;
8556 }
8558 case MODIFY_EXPR:
8559 {
8560 /* If lhs is complex, expand calls in rhs before computing it.
8561 That's so we don't compute a pointer and save it over a
8562 call. If lhs is simple, compute it first so we can give it
8563 as a target if the rhs is just a call. This avoids an
8564 extra temp and copy and that prevents a partial-subsumption
8565 which makes bad code. Actually we could treat
8566 component_ref's of vars like vars. */
8568 tree lhs = TREE_OPERAND (exp, 0);
8569 tree rhs = TREE_OPERAND (exp, 1);
8573 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8574 of size 1. In this case, (unless we need the result of the
8575 assignment) we can do this more efficiently with a
8576 test followed by an assignment, if necessary.
8578 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8579 things change so we do, this code should be enhanced to
8580 support it. */
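/* Editorial illustration: for one-bit fields "s.a |= s.b;" can be
   emitted as a conditional jump on s.b plus a store of 1, roughly
       if (s.b) s.a = 1;
   avoiding a full read-modify-write of s.a. */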
8581 if (ignore
8582 && TREE_CODE (lhs) == COMPONENT_REF
8583 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8584 || TREE_CODE (rhs) == BIT_AND_EXPR)
8585 && TREE_OPERAND (rhs, 0) == lhs
8586 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8587 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8588 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8589 {
8590 rtx label = gen_label_rtx ();
8592 do_jump (TREE_OPERAND (rhs, 1),
8593 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8594 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8595 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8596 (TREE_CODE (rhs) == BIT_IOR_EXPR
8597 ? integer_one_node
8598 : integer_zero_node)),
8599 0);
8600 do_pending_stack_adjust ();
8601 emit_label (label);
8602 return const0_rtx;
8603 }
8605 temp = expand_assignment (lhs, rhs, ! ignore);
8606 return temp;
8607 }
8609 case RETURN_EXPR:
8611 if (!TREE_OPERAND (exp, 0))
8612 expand_null_return ();
8613 else
8614 expand_return (TREE_OPERAND (exp, 0));
8615 return const0_rtx;
8617 case PREINCREMENT_EXPR:
8618 case PREDECREMENT_EXPR:
8619 return expand_increment (exp, 0, ignore);
8621 case POSTINCREMENT_EXPR:
8622 case POSTDECREMENT_EXPR:
8623 /* Faster to treat as pre-increment if result is not used. */
8624 return expand_increment (exp, ! ignore, ignore);
8626 case ADDR_EXPR:
8627 if (modifier == EXPAND_STACK_PARM)
8628 target = 0;
8629 /* Are we taking the address of a nested function? */
8630 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8631 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8632 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8633 && ! TREE_STATIC (exp))
8635 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8636 op0 = force_operand (op0, target);
8638 /* If we are taking the address of something erroneous, just
8639 use zero. */
8640 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8641 return const0_rtx;
8642 /* If we are taking the address of a constant and are at the
8643 top level, we have to use output_constant_def since we can't
8644 call force_const_mem at top level. */
8645 else if (cfun == 0
8646 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8647 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8648 == 'c')))
8649 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8650 else
8651 {
8652 /* We make sure to pass const0_rtx down if we came in with
8653 ignore set, to avoid doing the cleanups twice for something. */
8654 op0 = expand_expr (TREE_OPERAND (exp, 0),
8655 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8656 (modifier == EXPAND_INITIALIZER
8657 ? modifier : EXPAND_CONST_ADDRESS));
8659 /* If we are going to ignore the result, OP0 will have been set
8660 to const0_rtx, so just return it. Don't get confused and
8661 think we are taking the address of the constant. */
8662 if (ignore)
8663 return op0;
8665 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8666 clever and returns a REG when given a MEM. */
8667 op0 = protect_from_queue (op0, 1);
8669 /* We would like the object in memory. If it is a constant, we can
8670 have it be statically allocated into memory. For a non-constant,
8671 we need to allocate some memory and store the value into it. */
8673 if (CONSTANT_P (op0))
8674 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8675 op0);
8676 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8677 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8678 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8679 {
8680 /* If the operand is a SAVE_EXPR, we can deal with this by
8681 forcing the SAVE_EXPR into memory. */
8682 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8684 put_var_into_stack (TREE_OPERAND (exp, 0),
8685 /*rescan=*/true);
8686 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8687 }
8688 else
8689 {
8690 /* If this object is in a register, it can't be BLKmode. */
8691 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8692 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8694 if (GET_CODE (op0) == PARALLEL)
8695 /* Handle calls that pass values in multiple
8696 non-contiguous locations. The Irix 6 ABI has examples
8697 of this. */
8698 emit_group_store (memloc, op0, inner_type,
8699 int_size_in_bytes (inner_type));
8700 else
8701 emit_move_insn (memloc, op0);
8702 op0 = memloc;
8703 }
8707 if (GET_CODE (op0) != MEM)
8708 abort ();
8710 mark_temp_addr_taken (op0);
8711 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8713 op0 = XEXP (op0, 0);
8714 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8715 op0 = convert_memory_address (ptr_mode, op0);
8716 return op0;
8717 }
8719 /* If OP0 is not aligned at least as much as the type requires, we
8720 need to make a temporary, copy OP0 to it, and take the address of
8721 the temporary. We want to use the alignment of the type, not of
8722 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8723 the test for BLKmode means that can't happen. The test for
8724 BLKmode is because we never make mis-aligned MEMs with
8725 non-BLKmode modes.
8727 We don't need to do this at all if the machine doesn't have
8728 strict alignment. */
8729 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8730 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8731 > MEM_ALIGN (op0))
8732 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8734 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8737 if (TYPE_ALIGN_OK (inner_type))
8738 abort ();
8740 if (TREE_ADDRESSABLE (inner_type))
8742 /* We can't make a bitwise copy of this object, so fail. */
8743 error ("cannot take the address of an unaligned member");
8747 new = assign_stack_temp_for_type
8748 (TYPE_MODE (inner_type),
8749 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8750 : int_size_in_bytes (inner_type),
8751 1, build_qualified_type (inner_type,
8752 (TYPE_QUALS (inner_type)
8753 | TYPE_QUAL_CONST)));
8755 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8756 (modifier == EXPAND_STACK_PARM
8757 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8758 op0 = new;
8759 }
8762 op0 = force_operand (XEXP (op0, 0), target);
8765 if (flag_force_addr
8766 && GET_CODE (op0) != REG
8767 && modifier != EXPAND_CONST_ADDRESS
8768 && modifier != EXPAND_INITIALIZER
8769 && modifier != EXPAND_SUM)
8770 op0 = force_reg (Pmode, op0);
8772 if (GET_CODE (op0) == REG
8773 && ! REG_USERVAR_P (op0))
8774 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8776 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8777 op0 = convert_memory_address (ptr_mode, op0);
8778 return op0;
8781 case ENTRY_VALUE_EXPR:
8782 abort ();
8784 /* COMPLEX type for Extended Pascal & Fortran */
8785 case COMPLEX_EXPR:
8786 {
8787 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8788 rtx insns;
8790 /* Get the rtx code of the operands. */
8791 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8792 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8794 if (! target)
8795 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8797 start_sequence ();
8799 /* Move the real (op0) and imaginary (op1) parts to their location. */
8800 emit_move_insn (gen_realpart (mode, target), op0);
8801 emit_move_insn (gen_imagpart (mode, target), op1);
8803 insns = get_insns ();
8804 end_sequence ();
8806 /* Complex construction should appear as a single unit. */
8807 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8808 each with a separate pseudo as destination.
8809 It's not correct for flow to treat them as a unit. */
8810 if (GET_CODE (target) != CONCAT)
8811 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8812 else
8813 emit_insn (insns);
8814 return target;
8815 }
8817 case REALPART_EXPR:
8819 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8820 return gen_realpart (mode, op0);
8822 case IMAGPART_EXPR:
8823 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8824 return gen_imagpart (mode, op0);
8826 case CONJ_EXPR:
8827 {
8828 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8829 rtx imag_t;
8830 rtx insns;
8832 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8834 if (! target)
8835 target = gen_reg_rtx (mode);
8837 start_sequence ();
8839 /* Store the realpart and the negated imagpart to target. */
8840 emit_move_insn (gen_realpart (partmode, target),
8841 gen_realpart (partmode, op0));
8843 imag_t = gen_imagpart (partmode, target);
8844 temp = expand_unop (partmode,
8845 ! unsignedp && flag_trapv
8846 && (GET_MODE_CLASS(partmode) == MODE_INT)
8847 ? negv_optab : neg_optab,
8848 gen_imagpart (partmode, op0), imag_t, 0);
8849 if (temp != imag_t)
8850 emit_move_insn (imag_t, temp);
8852 insns = get_insns ();
8853 end_sequence ();
8855 /* Conjugate should appear as a single unit.
8856 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8857 each with a separate pseudo as destination.
8858 It's not correct for flow to treat them as a unit. */
8859 if (GET_CODE (target) != CONCAT)
8860 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8861 else
8862 emit_insn (insns);
8863 return target;
8864 }
8867 case TRY_CATCH_EXPR:
8868 {
8869 tree handler = TREE_OPERAND (exp, 1);
8871 expand_eh_region_start ();
8873 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8875 expand_eh_region_end_cleanup (handler);
8876 return op0;
8877 }
8880 case TRY_FINALLY_EXPR:
8881 {
8882 tree try_block = TREE_OPERAND (exp, 0);
8883 tree finally_block = TREE_OPERAND (exp, 1);
8885 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8886 {
8887 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8888 is not sufficient, so we cannot expand the block twice.
8889 So we play games with GOTO_SUBROUTINE_EXPR to let us
8890 expand the thing only once. */
8891 /* When not optimizing, we go ahead with this form since
8892 (1) user breakpoints operate more predictably without
8893 code duplication, and
8894 (2) we're not running any of the global optimizers
8895 that would explode in time/space with the highly
8896 connected CFG created by the indirect branching. */
8898 rtx finally_label = gen_label_rtx ();
8899 rtx done_label = gen_label_rtx ();
8900 rtx return_link = gen_reg_rtx (Pmode);
8901 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8902 (tree) finally_label, (tree) return_link);
8903 TREE_SIDE_EFFECTS (cleanup) = 1;
8905 /* Start a new binding layer that will keep track of all cleanup
8906 actions to be performed. */
8907 expand_start_bindings (2);
8908 target_temp_slot_level = temp_slot_level;
8910 expand_decl_cleanup (NULL_TREE, cleanup);
8911 op0 = expand_expr (try_block, target, tmode, modifier);
8913 preserve_temp_slots (op0);
8914 expand_end_bindings (NULL_TREE, 0, 0);
8915 emit_jump (done_label);
8916 emit_label (finally_label);
8917 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8918 emit_indirect_jump (return_link);
8919 emit_label (done_label);
8920 }
8921 else
8922 {
8923 expand_start_bindings (2);
8924 target_temp_slot_level = temp_slot_level;
8926 expand_decl_cleanup (NULL_TREE, finally_block);
8927 op0 = expand_expr (try_block, target, tmode, modifier);
8929 preserve_temp_slots (op0);
8930 expand_end_bindings (NULL_TREE, 0, 0);
8931 }
8933 return op0;
8934 }
8936 case GOTO_SUBROUTINE_EXPR:
8937 {
8938 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8939 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8940 rtx return_address = gen_label_rtx ();
8941 emit_move_insn (return_link,
8942 gen_rtx_LABEL_REF (Pmode, return_address));
8943 emit_jump (subr);
8944 emit_label (return_address);
8945 return const0_rtx;
8946 }
8948 case VA_ARG_EXPR:
8949 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8951 case EXC_PTR_EXPR:
8952 return get_exception_pointer (cfun);
8954 case FDESC_EXPR:
8955 /* Function descriptors are not valid except as
8956 initialization constants, and should not be expanded. */
8957 abort ();
8959 default:
8960 /* ??? Use (*fun) form because expand_expr is a macro. */
8961 return (*lang_hooks.expand_expr) (exp, original_target, tmode,
8962 modifier, alt_rtl);
8963 }
8965 /* Here to do an ordinary binary operator, generating an instruction
8966 from the optab already placed in `this_optab'. */
8967 binop:
8968 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8969 subtarget, &op0, &op1, 0);
8970 binop2:
8971 if (modifier == EXPAND_STACK_PARM)
8972 target = 0;
8973 temp = expand_binop (mode, this_optab, op0, op1, target,
8974 unsignedp, OPTAB_LIB_WIDEN);
8975 if (temp == 0)
8976 abort ();
8977 return temp;
8978 }
8980 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8981 when applied to the address of EXP produces an address known to be
8982 aligned more than BIGGEST_ALIGNMENT. */
8984 static int
8985 is_aligning_offset (tree offset, tree exp)
8986 {
8987 /* Strip off any conversions. */
8988 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8989 || TREE_CODE (offset) == NOP_EXPR
8990 || TREE_CODE (offset) == CONVERT_EXPR)
8991 offset = TREE_OPERAND (offset, 0);
8993 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8994 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8995 if (TREE_CODE (offset) != BIT_AND_EXPR
8996 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8997 || compare_tree_int (TREE_OPERAND (offset, 1),
8998 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8999 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9000 return 0;
9002 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9003 It must be NEGATE_EXPR. Then strip any more conversions. */
9004 offset = TREE_OPERAND (offset, 0);
9005 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9006 || TREE_CODE (offset) == NOP_EXPR
9007 || TREE_CODE (offset) == CONVERT_EXPR)
9008 offset = TREE_OPERAND (offset, 0);
9010 if (TREE_CODE (offset) != NEGATE_EXPR)
9011 return 0;
9013 offset = TREE_OPERAND (offset, 0);
9014 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9015 || TREE_CODE (offset) == NOP_EXPR
9016 || TREE_CODE (offset) == CONVERT_EXPR)
9017 offset = TREE_OPERAND (offset, 0);
9019 /* This must now be the address of EXP. */
9020 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9021 }
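/* Editorial illustration: the pattern accepted above is the usual
   align-upward idiom.  With an alignment of 16, an offset of the form
       (- (intptr_t) &exp) & 15
   added to the address of EXP yields an address rounded up to a
   16-byte boundary. */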
9023 /* Return the tree node if ARG corresponds to a string constant or zero
9024 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9025 in bytes within the string that ARG is accessing. The type of the
9026 offset will be `sizetype'. */
9028 tree
9029 string_constant (tree arg, tree *ptr_offset)
9030 {
9031 STRIP_NOPS (arg);
9033 if (TREE_CODE (arg) == ADDR_EXPR
9034 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9036 *ptr_offset = size_zero_node;
9037 return TREE_OPERAND (arg, 0);
9039 else if (TREE_CODE (arg) == PLUS_EXPR)
9041 tree arg0 = TREE_OPERAND (arg, 0);
9042 tree arg1 = TREE_OPERAND (arg, 1);
9044 STRIP_NOPS (arg0);
9045 STRIP_NOPS (arg1);
9047 if (TREE_CODE (arg0) == ADDR_EXPR
9048 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9050 *ptr_offset = convert (sizetype, arg1);
9051 return TREE_OPERAND (arg0, 0);
9053 else if (TREE_CODE (arg1) == ADDR_EXPR
9054 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9056 *ptr_offset = convert (sizetype, arg0);
9057 return TREE_OPERAND (arg1, 0);
9058 }
9059 }
9061 return 0;
9062 }
9064 /* Expand code for a post- or pre- increment or decrement
9065 and return the RTX for the result.
9066 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
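/* Editorial illustration: for "y = x++;" POST is 1 and the result is the
   old value of x, so a copy is saved before the add; for "y = ++x;" POST
   is 0 and the incremented value itself is the result. */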
9068 static rtx
9069 expand_increment (tree exp, int post, int ignore)
9070 {
9071 rtx op0, op1;
9072 rtx temp, value;
9073 tree incremented = TREE_OPERAND (exp, 0);
9074 optab this_optab = add_optab;
9076 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9077 int op0_is_copy = 0;
9078 int single_insn = 0;
9079 /* 1 means we can't store into OP0 directly,
9080 because it is a subreg narrower than a word,
9081 and we don't dare clobber the rest of the word. */
9084 /* Stabilize any component ref that might need to be
9085 evaluated more than once below. */
9086 if (!post
9087 || TREE_CODE (incremented) == BIT_FIELD_REF
9088 || (TREE_CODE (incremented) == COMPONENT_REF
9089 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9090 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9091 incremented = stabilize_reference (incremented);
9092 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9093 ones into save exprs so that they don't accidentally get evaluated
9094 more than once by the code below. */
9095 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9096 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9097 incremented = save_expr (incremented);
9099 /* Compute the operands as RTX.
9100 Note whether OP0 is the actual lvalue or a copy of it:
9101 I believe it is a copy iff it is a register or subreg
9102 and insns were generated in computing it. */
9104 temp = get_last_insn ();
9105 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9107 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9108 in place but instead must do sign- or zero-extension during assignment,
9109 so we copy it into a new register and let the code below use it as
9110 a copy.
9112 Note that we can safely modify this SUBREG since it is known not to be
9113 shared (it was made by the expand_expr call above). */
9115 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9117 {
9118 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9119 op0_is_copy = 1;
9120 }
9122 else if (GET_CODE (op0) == SUBREG
9123 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9125 /* We cannot increment this SUBREG in place. If we are
9126 post-incrementing, get a copy of the old value. Otherwise,
9127 just mark that we cannot increment in place. */
9128 if (post)
9129 op0 = copy_to_reg (op0);
9130 else
9131 bad_subreg = 1;
9132 }
9134 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9135 && temp != get_last_insn ());
9136 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9138 /* Decide whether incrementing or decrementing. */
9139 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9140 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9141 this_optab = sub_optab;
9143 /* Convert decrement by a constant into a negative increment. */
9144 if (this_optab == sub_optab
9145 && GET_CODE (op1) == CONST_INT)
9147 op1 = GEN_INT (-INTVAL (op1));
9148 this_optab = add_optab;
9151 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9152 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9154 /* For a preincrement, see if we can do this with a single instruction. */
9155 if (!post)
9156 {
9157 icode = (int) this_optab->handlers[(int) mode].insn_code;
9158 if (icode != (int) CODE_FOR_nothing
9159 /* Make sure that OP0 is valid for operands 0 and 1
9160 of the insn we want to queue. */
9161 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9162 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9163 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9164 single_insn = 1;
9165 }
9167 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9168 then we cannot just increment OP0. We must therefore contrive to
9169 increment the original value. Then, for postincrement, we can return
9170 OP0 since it is a copy of the old value. For preincrement, expand here
9171 unless we can do it with a single insn.
9173 Likewise if storing directly into OP0 would clobber high bits
9174 we need to preserve (bad_subreg). */
9175 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9177 /* This is the easiest way to increment the value wherever it is.
9178 Problems with multiple evaluation of INCREMENTED are prevented
9179 because either (1) it is a component_ref or preincrement,
9180 in which case it was stabilized above, or (2) it is an array_ref
9181 with constant index in an array in a register, which is
9182 safe to reevaluate. */
9183 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9184 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9185 ? MINUS_EXPR : PLUS_EXPR),
9186 TREE_TYPE (exp),
9187 incremented,
9188 TREE_OPERAND (exp, 1));
9190 while (TREE_CODE (incremented) == NOP_EXPR
9191 || TREE_CODE (incremented) == CONVERT_EXPR)
9192 {
9193 newexp = convert (TREE_TYPE (incremented), newexp);
9194 incremented = TREE_OPERAND (incremented, 0);
9195 }
9197 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9198 return post ? op0 : temp;
9203 /* We have a true reference to the value in OP0.
9204 If there is an insn to add or subtract in this mode, queue it.
9205 Queuing the increment insn avoids the register shuffling
9206 that often results if we must increment now and first save
9207 the old value for subsequent use. */
9209 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9210 op0 = stabilize (op0);
9213 icode = (int) this_optab->handlers[(int) mode].insn_code;
9214 if (icode != (int) CODE_FOR_nothing
9215 /* Make sure that OP0 is valid for operands 0 and 1
9216 of the insn we want to queue. */
9217 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9218 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9220 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9221 op1 = force_reg (mode, op1);
9223 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9225 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9227 rtx addr = (general_operand (XEXP (op0, 0), mode)
9228 ? force_reg (Pmode, XEXP (op0, 0))
9229 : copy_to_reg (XEXP (op0, 0)));
9230 rtx temp, result;
9232 op0 = replace_equiv_address (op0, addr);
9233 temp = force_reg (GET_MODE (op0), op0);
9234 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9235 op1 = force_reg (mode, op1);
9237 /* The increment queue is LIFO, thus we have to `queue'
9238 the instructions in reverse order. */
9239 enqueue_insn (op0, gen_move_insn (op0, temp));
9240 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9241 return result;
9242 }
9245 /* Preincrement, or we can't increment with one simple insn. */
9246 if (post)
9247 /* Save a copy of the value before inc or dec, to return it later. */
9248 temp = value = copy_to_reg (op0);
9249 else
9250 /* Arrange to return the incremented value. */
9251 /* Copy the rtx because expand_binop will protect from the queue,
9252 and the results of that would be invalid for us to return
9253 if our caller does emit_queue before using our result. */
9254 temp = copy_rtx (value = op0);
9256 /* Increment however we can. */
9257 op1 = expand_binop (mode, this_optab, value, op1, op0,
9258 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9260 /* Make sure the value is stored into OP0. */
9261 if (op1 != op0)
9262 emit_move_insn (op0, op1);
9263 return temp;
9264 }
9267 /* Generate code to calculate EXP using a store-flag instruction
9268 and return an rtx for the result. EXP is either a comparison
9269 or a TRUTH_NOT_EXPR whose operand is a comparison.
9271 If TARGET is nonzero, store the result there if convenient.
9273 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9274 cheap.
9276 Return zero if there is no suitable set-flag instruction
9277 available on this machine.
9279 Once expand_expr has been called on the arguments of the comparison,
9280 we are committed to doing the store flag, since it is not safe to
9281 re-evaluate the expression. We emit the store-flag insn by calling
9282 emit_store_flag, but only expand the arguments if we have a reason
9283 to believe that emit_store_flag will be successful. If we think that
9284 it will, but it isn't, we have to simulate the store-flag with a
9285 set/jump/set sequence. */
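/* Editorial illustration: when no store-flag (scc) insn applies, a
   comparison such as "r = (a < b);" is simulated with the set/jump/set
   sequence described above, roughly
       r = 1; if (a < b) goto L; r = 0; L:;  */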
9287 static rtx
9288 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9289 {
9290 enum rtx_code code;
9291 tree arg0, arg1, type;
9293 enum machine_mode operand_mode;
9297 enum insn_code icode;
9298 rtx subtarget = target;
9299 rtx result, label;
9301 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9302 result at the end. We can't simply invert the test since it would
9303 have already been inverted if it were valid. This case occurs for
9304 some floating-point comparisons. */
9306 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9307 invert = 1, exp = TREE_OPERAND (exp, 0);
9309 arg0 = TREE_OPERAND (exp, 0);
9310 arg1 = TREE_OPERAND (exp, 1);
9312 /* Don't crash if the comparison was erroneous. */
9313 if (arg0 == error_mark_node || arg1 == error_mark_node)
9314 return const0_rtx;
9316 type = TREE_TYPE (arg0);
9317 operand_mode = TYPE_MODE (type);
9318 unsignedp = TREE_UNSIGNED (type);
9320 /* We won't bother with BLKmode store-flag operations because it would mean
9321 passing a lot of information to emit_store_flag. */
9322 if (operand_mode == BLKmode)
9323 return 0;
9325 /* We won't bother with store-flag operations involving function pointers
9326 when function pointers must be canonicalized before comparisons. */
9327 #ifdef HAVE_canonicalize_funcptr_for_compare
9328 if (HAVE_canonicalize_funcptr_for_compare
9329 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9330 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9332 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9333 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9334 == FUNCTION_TYPE))))
9335 return 0;
9336 #endif
9341 /* Get the rtx comparison code to use. We know that EXP is a comparison
9342 operation of some type. Some comparisons against 1 and -1 can be
9343 converted to comparisons with zero. Do so here so that the tests
9344 below will be aware that we have a comparison with zero. These
9345 tests will not catch constants in the first operand, but constants
9346 are rarely passed as the first operand. */
9348 switch (TREE_CODE (exp))
9349 {
9350 case EQ_EXPR:
9351 code = EQ;
9352 break;
9353 case NE_EXPR:
9354 code = NE;
9355 break;
9356 case LT_EXPR:
9357 if (integer_onep (arg1))
9358 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9359 else
9360 code = unsignedp ? LTU : LT;
9361 break;
9362 case LE_EXPR:
9363 if (! unsignedp && integer_all_onesp (arg1))
9364 arg1 = integer_zero_node, code = LT;
9365 else
9366 code = unsignedp ? LEU : LE;
9367 break;
9368 case GT_EXPR:
9369 if (! unsignedp && integer_all_onesp (arg1))
9370 arg1 = integer_zero_node, code = GE;
9371 else
9372 code = unsignedp ? GTU : GT;
9373 break;
9374 case GE_EXPR:
9375 if (integer_onep (arg1))
9376 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9377 else
9378 code = unsignedp ? GEU : GE;
9379 break;
9381 case UNORDERED_EXPR:
9407 /* Put a constant second. */
9408 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9410 tem = arg0; arg0 = arg1; arg1 = tem;
9411 code = swap_condition (code);
9414 /* If this is an equality or inequality test of a single bit, we can
9415 do this by shifting the bit being tested to the low-order bit and
9416 masking the result with the constant 1. If the condition was EQ,
9417 we xor it with 1. This does not require an scc insn and is faster
9418 than an scc insn even if we have it.
9420 The code to make this transformation was moved into fold_single_bit_test,
9421 so we just call into the folder and expand its result. */
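/* Editorial illustration: "(x & 8) != 0" tests a single bit, so
   fold_single_bit_test can rewrite it as "(x >> 3) & 1", which needs
   neither a branch nor an scc instruction. */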
9423 if ((code == NE || code == EQ)
9424 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9425 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9427 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9428 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9429 arg0, arg1, type),
9430 target, VOIDmode, EXPAND_NORMAL);
9433 /* Now see if we are likely to be able to do this. Return if not. */
9434 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9435 return 0;
9437 icode = setcc_gen_code[(int) code];
9438 if (icode == CODE_FOR_nothing
9439 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9441 /* We can only do this if it is one of the special cases that
9442 can be handled without an scc insn. */
9443 if ((code == LT && integer_zerop (arg1))
9444 || (! only_cheap && code == GE && integer_zerop (arg1)))
9445 ;
9446 else if (BRANCH_COST >= 0
9447 && ! only_cheap && (code == NE || code == EQ)
9448 && TREE_CODE (type) != REAL_TYPE
9449 && ((abs_optab->handlers[(int) operand_mode].insn_code
9450 != CODE_FOR_nothing)
9451 || (ffs_optab->handlers[(int) operand_mode].insn_code
9452 != CODE_FOR_nothing)))
9453 ;
9454 else
9455 return 0;
9456 }
9458 if (! get_subtarget (target)
9459 || GET_MODE (subtarget) != operand_mode)
9460 subtarget = 0;
9462 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9464 if (target == 0)
9465 target = gen_reg_rtx (mode);
9467 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9468 because, if the emit_store_flag does anything it will succeed and
9469 OP0 and OP1 will not be used subsequently. */
9471 result = emit_store_flag (target, code,
9472 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9473 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9474 operand_mode, unsignedp, 1);
9476 if (result)
9477 {
9478 if (invert)
9479 result = expand_binop (mode, xor_optab, result, const1_rtx,
9480 result, 0, OPTAB_LIB_WIDEN);
9481 return result;
9482 }
9484 /* If this failed, we have to do this with set/compare/jump/set code. */
9485 if (GET_CODE (target) != REG
9486 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9487 target = gen_reg_rtx (GET_MODE (target));
9489 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9490 result = compare_from_rtx (op0, op1, code, unsignedp,
9491 operand_mode, NULL_RTX);
9492 if (GET_CODE (result) == CONST_INT)
9493 return (((result == const0_rtx && ! invert)
9494 || (result != const0_rtx && invert))
9495 ? const0_rtx : const1_rtx);
9497 /* The code of RESULT may not match CODE if compare_from_rtx
9498 decided to swap its operands and reverse the original code.
9500 We know that compare_from_rtx returns either a CONST_INT or
9501 a new comparison code, so it is safe to just extract the
9502 code from RESULT. */
9503 code = GET_CODE (result);
9505 label = gen_label_rtx ();
9506 if (bcc_gen_fctn[(int) code] == 0)
9507 abort ();
9509 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9510 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9511 emit_label (label);
9512 return target;
9513 }
9517 /* Stubs in case we haven't got a casesi insn. */
9518 #ifndef HAVE_casesi
9519 # define HAVE_casesi 0
9520 # define gen_casesi(a, b, c, d, e) (0)
9521 # define CODE_FOR_casesi CODE_FOR_nothing
9522 #endif
9524 /* If the machine does not have a case insn that compares the bounds,
9525 this means extra overhead for dispatch tables, which raises the
9526 threshold for using them. */
9527 #ifndef CASE_VALUES_THRESHOLD
9528 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9529 #endif /* CASE_VALUES_THRESHOLD */
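/* Editorial illustration: with the default above, a switch statement
   with fewer than 4 (with casesi) or 5 (without) case labels is expanded
   as a chain of compare-and-branch insns; only larger switches get a
   dispatch table. */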
9531 unsigned int
9532 case_values_threshold (void)
9533 {
9534 return CASE_VALUES_THRESHOLD;
9535 }
9537 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9538 0 otherwise (i.e. if there is no casesi instruction). */
9539 int
9540 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9541 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9542 {
9543 enum machine_mode index_mode = SImode;
9544 int index_bits = GET_MODE_BITSIZE (index_mode);
9545 rtx op1, op2, index;
9546 enum machine_mode op_mode;
9548 if (! HAVE_casesi)
9549 abort ();
9551 /* Convert the index to SImode. */
9552 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9554 enum machine_mode omode = TYPE_MODE (index_type);
9555 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9557 /* We must handle the endpoints in the original mode. */
9558 index_expr = build (MINUS_EXPR, index_type,
9559 index_expr, minval);
9560 minval = integer_zero_node;
9561 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9562 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9563 omode, 1, default_label);
9564 /* Now we can safely truncate. */
9565 index = convert_to_mode (index_mode, index, 0);
9566 }
9567 else
9568 {
9569 if (TYPE_MODE (index_type) != index_mode)
9570 {
9571 index_expr = convert (lang_hooks.types.type_for_size
9572 (index_bits, 0), index_expr);
9573 index_type = TREE_TYPE (index_expr);
9574 }
9575 }
9576 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9578 emit_queue ();
9579 index = protect_from_queue (index, 0);
9580 do_pending_stack_adjust ();
9582 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9583 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9584 (index, op_mode))
9585 index = copy_to_mode_reg (op_mode, index);
9587 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9589 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9590 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9591 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9592 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9593 (op1, op_mode))
9594 op1 = copy_to_mode_reg (op_mode, op1);
9596 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9598 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9599 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9600 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9601 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9602 (op2, op_mode))
9603 op2 = copy_to_mode_reg (op_mode, op2);
9605 emit_jump_insn (gen_casesi (index, op1, op2,
9606 table_label, default_label));
9607 return 1;
9608 }
9610 /* Attempt to generate a tablejump instruction; same concept. */
9611 #ifndef HAVE_tablejump
9612 #define HAVE_tablejump 0
9613 #define gen_tablejump(x, y) (0)
9614 #endif
9616 /* Subroutine of the next function.
9618 INDEX is the value being switched on, with the lowest value
9619 in the table already subtracted.
9620 MODE is its expected mode (needed if INDEX is constant).
9621 RANGE is the length of the jump table.
9622 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9624 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9625 index value is out of range. */
9627 static void
9628 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9629 rtx default_label)
9630 {
9631 rtx temp, vector;
9633 if (INTVAL (range) > cfun->max_jumptable_ents)
9634 cfun->max_jumptable_ents = INTVAL (range);
9636 /* Do an unsigned comparison (in the proper mode) between the index
9637 expression and the value which represents the length of the range.
9638 Since we just finished subtracting the lower bound of the range
9639 from the index expression, this comparison allows us to simultaneously
9640 check that the original index expression value is both greater than
9641 or equal to the minimum value of the range and less than or equal to
9642 the maximum value of the range. */
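/* Editorial illustration: with bounds LO..HI and index I, after the
   subtraction a single unsigned comparison
       (unsigned) (I - LO) > (unsigned) (HI - LO)
   rejects both I < LO (the subtraction wraps to a huge value) and
   I > HI. */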
9644 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9645 default_label);
9647 /* If index is in range, it must fit in Pmode.
9648 Convert to Pmode so we can index with it. */
9649 if (mode != Pmode)
9650 index = convert_to_mode (Pmode, index, 1);
9652 /* Don't let a MEM slip through, because then INDEX that comes
9653 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9654 and break_out_memory_refs will go to work on it and mess it up. */
9655 #ifdef PIC_CASE_VECTOR_ADDRESS
9656 if (flag_pic && GET_CODE (index) != REG)
9657 index = copy_to_mode_reg (Pmode, index);
9658 #endif
9660 /* If flag_force_addr were to affect this address
9661 it could interfere with the tricky assumptions made
9662 about addresses that contain label-refs,
9663 which may be valid only very near the tablejump itself. */
9664 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9665 GET_MODE_SIZE, because this indicates how large insns are. The other
9666 uses should all be Pmode, because they are addresses. This code
9667 could fail if addresses and insns are not the same size. */
9668 index = gen_rtx_PLUS (Pmode,
9669 gen_rtx_MULT (Pmode, index,
9670 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9671 gen_rtx_LABEL_REF (Pmode, table_label));
9672 #ifdef PIC_CASE_VECTOR_ADDRESS
9673 if (flag_pic)
9674 index = PIC_CASE_VECTOR_ADDRESS (index);
9675 else
9676 #endif
9677 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9678 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9679 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9680 RTX_UNCHANGING_P (vector) = 1;
9681 MEM_NOTRAP_P (vector) = 1;
9682 convert_move (temp, vector, 0);
9684 emit_jump_insn (gen_tablejump (temp, table_label));
9686 /* If we are generating PIC code or if the table is PC-relative, the
9687 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9688 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9689 emit_barrier ();
9690 }
9692 int
9693 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9694 rtx table_label, rtx default_label)
9695 {
9696 rtx index;
9698 if (! HAVE_tablejump)
9699 return 0;
9701 index_expr = fold (build (MINUS_EXPR, index_type,
9702 convert (index_type, index_expr),
9703 convert (index_type, minval)));
9704 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9705 emit_queue ();
9706 index = protect_from_queue (index, 0);
9707 do_pending_stack_adjust ();
9709 do_tablejump (index, TYPE_MODE (index_type),
9710 convert_modes (TYPE_MODE (index_type),
9711 TYPE_MODE (TREE_TYPE (range)),
9712 expand_expr (range, NULL_RTX,
9713 VOIDmode, 0),
9714 TREE_UNSIGNED (TREE_TYPE (range))),
9715 table_label, default_label);
9716 return 1;
9717 }
9719 /* Nonzero if the mode is a valid vector mode for this architecture.
9720 This returns nonzero even if there is no hardware support for the
9721 vector mode, but we can emulate with narrower modes. */
9723 int
9724 vector_mode_valid_p (enum machine_mode mode)
9725 {
9726 enum mode_class class = GET_MODE_CLASS (mode);
9727 enum machine_mode innermode;
9729 /* Doh! What's going on? */
9730 if (class != MODE_VECTOR_INT
9731 && class != MODE_VECTOR_FLOAT)
9732 return 0;
9734 /* Hardware support. Woo hoo! */
9735 if (VECTOR_MODE_SUPPORTED_P (mode))
9736 return 1;
9738 innermode = GET_MODE_INNER (mode);
9740 /* We should probably return 1 if requesting V4DI and we have no DI,
9741 but we have V2DI, but this is probably very unlikely. */
9743 /* If we have support for the inner mode, we can safely emulate it.
9744 We may not have V2DI, but we can emulate with a pair of DIs. */
9745 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9746 }
9748 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9749 static rtx
9750 const_vector_from_tree (tree exp)
9751 {
9752 rtvec v;
9753 int units, i;
9754 tree link, elt;
9755 enum machine_mode inner, mode;
9757 mode = TYPE_MODE (TREE_TYPE (exp));
9759 if (is_zeros_p (exp))
9760 return CONST0_RTX (mode);
9762 units = GET_MODE_NUNITS (mode);
9763 inner = GET_MODE_INNER (mode);
9765 v = rtvec_alloc (units);
9767 link = TREE_VECTOR_CST_ELTS (exp);
9768 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9769 {
9770 elt = TREE_VALUE (link);
9772 if (TREE_CODE (elt) == REAL_CST)
9773 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9774 inner);
9775 else
9776 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9777 TREE_INT_CST_HIGH (elt),
9778 inner);
9779 }
9781 /* Initialize remaining elements to 0. */
9782 for (; i < units; ++i)
9783 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9785 return gen_rtx_raw_CONST_VECTOR (mode, v);
9786 }
9788 #include "gt-expr.h"