1 /* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 #include "insn-attr.h"
35 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
42 #include "typeclass.h"
48 /* Decide whether a function's arguments should be processed
49 from first to last or from last to first.
51 They should if the stack and args grow in opposite directions, but
52 only if we have push insns. */
56 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
57 #define PUSH_ARGS_REVERSED /* If it's last to first. */
62 #ifndef STACK_PUSH_CODE
63 #ifdef STACK_GROWS_DOWNWARD
64 #define STACK_PUSH_CODE PRE_DEC
66 #define STACK_PUSH_CODE PRE_INC
70 /* Assume that case vectors are not pc-relative. */
71 #ifndef CASE_VECTOR_PC_RELATIVE
72 #define CASE_VECTOR_PC_RELATIVE 0
75 /* Hook called by safe_from_p for language-specific tree codes. It is
76 up to the language front-end to install a hook if it has any such
77 codes that safe_from_p needs to know about. Since same_from_p will
78 recursively explore the TREE_OPERANDs of an expression, this hook
79 should not reexamine those pieces. This routine may recursively
80 call safe_from_p; it should always pass `0' as the TOP_P
82 int (*lang_safe_from_p) PARAMS ((rtx, tree));
84 /* If this is nonzero, we do not bother generating VOLATILE
85 around volatile memory references, and we are willing to
86 output indirect addresses. If cse is to follow, we reject
87 indirect addresses so a useful potential cse is generated;
88 if it is used only once, instruction combination will produce
89 the same indirect address eventually. */
92 /* Don't check memory usage, since code is being emitted to check a memory
93 usage. Used when current_function_check_memory_usage is true, to avoid
94 infinite recursion. */
95 static int in_check_memory_usage;
97 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
98 static tree placeholder_list = 0;
100 /* This structure is used by move_by_pieces to describe the move to
102 struct move_by_pieces
111 int explicit_inc_from;
112 unsigned HOST_WIDE_INT len;
113 HOST_WIDE_INT offset;
117 /* This structure is used by store_by_pieces to describe the clear to
120 struct store_by_pieces
126 unsigned HOST_WIDE_INT len;
127 HOST_WIDE_INT offset;
128 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
133 extern struct obstack permanent_obstack;
135 static rtx get_push_address PARAMS ((int));
137 static rtx enqueue_insn PARAMS ((rtx, rtx));
138 static unsigned HOST_WIDE_INT move_by_pieces_ninsns
139 PARAMS ((unsigned HOST_WIDE_INT,
141 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
142 struct move_by_pieces *));
143 static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
145 static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
147 static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
149 static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
151 struct store_by_pieces *));
152 static rtx get_subtarget PARAMS ((rtx));
153 static int is_zeros_p PARAMS ((tree));
154 static int mostly_zeros_p PARAMS ((tree));
155 static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
156 HOST_WIDE_INT, enum machine_mode,
157 tree, tree, int, int));
158 static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
159 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
160 HOST_WIDE_INT, enum machine_mode,
161 tree, enum machine_mode, int, tree,
163 static enum memory_use_mode
164 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
165 static rtx var_rtx PARAMS ((tree));
166 static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
167 static rtx expand_increment PARAMS ((tree, int, int));
168 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
169 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
170 static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
172 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
174 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
176 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
178 /* Record for each mode whether we can move a register directly to or
179 from an object of that mode in memory. If we can't, we won't try
180 to use that mode directly when accessing a field of that mode. */
182 static char direct_load[NUM_MACHINE_MODES];
183 static char direct_store[NUM_MACHINE_MODES];
185 /* If a memory-to-memory move would take MOVE_RATIO or more simple
186 move-instruction sequences, we will do a movstr or libcall instead. */
189 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
192 /* If we are optimizing for space (-Os), cut down the default move ratio. */
193 #define MOVE_RATIO (optimize_size ? 3 : 15)
197 /* This macro is used to determine whether move_by_pieces should be called
198 to perform a structure copy. */
199 #ifndef MOVE_BY_PIECES_P
200 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
201 (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
204 /* This array records the insn_code of insns to perform block moves. */
205 enum insn_code movstr_optab[NUM_MACHINE_MODES];
207 /* This array records the insn_code of insns to perform block clears. */
208 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
210 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
212 #ifndef SLOW_UNALIGNED_ACCESS
213 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
216 /* This is run once per compilation to set up which modes can be used
217 directly in memory and to initialize the block move optab. */
223 enum machine_mode mode;
229 /* Try indexing by frame ptr and try by stack ptr.
230 It is known that on the Convex the stack ptr isn't a valid index.
231 With luck, one or the other is valid on any machine. */
232 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
233 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
235 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
236 pat = PATTERN (insn);
238 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
239 mode = (enum machine_mode) ((int) mode + 1))
244 direct_load[(int) mode] = direct_store[(int) mode] = 0;
245 PUT_MODE (mem, mode);
246 PUT_MODE (mem1, mode);
248 /* See if there is some register that can be used in this mode and
249 directly loaded or stored from memory. */
251 if (mode != VOIDmode && mode != BLKmode)
252 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
253 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
256 if (! HARD_REGNO_MODE_OK (regno, mode))
259 reg = gen_rtx_REG (mode, regno);
262 SET_DEST (pat) = reg;
263 if (recog (pat, insn, &num_clobbers) >= 0)
264 direct_load[(int) mode] = 1;
266 SET_SRC (pat) = mem1;
267 SET_DEST (pat) = reg;
268 if (recog (pat, insn, &num_clobbers) >= 0)
269 direct_load[(int) mode] = 1;
272 SET_DEST (pat) = mem;
273 if (recog (pat, insn, &num_clobbers) >= 0)
274 direct_store[(int) mode] = 1;
277 SET_DEST (pat) = mem1;
278 if (recog (pat, insn, &num_clobbers) >= 0)
279 direct_store[(int) mode] = 1;
286 /* This is run at the start of compiling a function. */
291 cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));
294 pending_stack_adjust = 0;
295 stack_pointer_delta = 0;
296 inhibit_defer_pop = 0;
298 apply_args_value = 0;
304 struct expr_status *p;
309 ggc_mark_rtx (p->x_saveregs_value);
310 ggc_mark_rtx (p->x_apply_args_value);
311 ggc_mark_rtx (p->x_forced_labels);
322 /* Small sanity check that the queue is empty at the end of a function. */
325 finish_expr_for_function ()
331 /* Manage the queue of increment instructions to be output
332 for POSTINCREMENT_EXPR expressions, etc. */
334 /* Queue up to increment (or change) VAR later. BODY says how:
335 BODY should be the same thing you would pass to emit_insn
336 to increment right away. It will go to emit_insn later on.
338 The value is a QUEUED expression to be used in place of VAR
339 where you want to guarantee the pre-incrementation value of VAR. */
342 enqueue_insn (var, body)
345 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
346 body, pending_chain);
347 return pending_chain;
350 /* Use protect_from_queue to convert a QUEUED expression
351 into something that you can put immediately into an instruction.
352 If the queued incrementation has not happened yet,
353 protect_from_queue returns the variable itself.
354 If the incrementation has happened, protect_from_queue returns a temp
355 that contains a copy of the old value of the variable.
357 Any time an rtx which might possibly be a QUEUED is to be put
358 into an instruction, it must be passed through protect_from_queue first.
359 QUEUED expressions are not meaningful in instructions.
361 Do not pass a value through protect_from_queue and then hold
362 on to it for a while before putting it in an instruction!
363 If the queue is flushed in between, incorrect code will result. */
366 protect_from_queue (x, modify)
370 RTX_CODE code = GET_CODE (x);
372 #if 0 /* A QUEUED can hang around after the queue is forced out. */
373 /* Shortcut for most common case. */
374 if (pending_chain == 0)
380 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
381 use of autoincrement. Make a copy of the contents of the memory
382 location rather than a copy of the address, but not if the value is
383 of mode BLKmode. Don't modify X in place since it might be
385 if (code == MEM && GET_MODE (x) != BLKmode
386 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
389 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
393 rtx temp = gen_reg_rtx (GET_MODE (x));
395 emit_insn_before (gen_move_insn (temp, new),
400 /* Copy the address into a pseudo, so that the returned value
401 remains correct across calls to emit_queue. */
402 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
405 /* Otherwise, recursively protect the subexpressions of all
406 the kinds of rtx's that can contain a QUEUED. */
409 rtx tem = protect_from_queue (XEXP (x, 0), 0);
410 if (tem != XEXP (x, 0))
416 else if (code == PLUS || code == MULT)
418 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
419 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
420 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
429 /* If the increment has not happened, use the variable itself. Copy it
430 into a new pseudo so that the value remains correct across calls to
432 if (QUEUED_INSN (x) == 0)
433 return copy_to_reg (QUEUED_VAR (x));
434 /* If the increment has happened and a pre-increment copy exists,
436 if (QUEUED_COPY (x) != 0)
437 return QUEUED_COPY (x);
438 /* The increment has happened but we haven't set up a pre-increment copy.
439 Set one up now, and use it. */
440 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
441 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
443 return QUEUED_COPY (x);
446 /* Return nonzero if X contains a QUEUED expression:
447 if it contains anything that will be altered by a queued increment.
448 We handle only combinations of MEM, PLUS, MINUS and MULT operators
449 since memory addresses generally contain only those. */
455 enum rtx_code code = GET_CODE (x);
461 return queued_subexp_p (XEXP (x, 0));
465 return (queued_subexp_p (XEXP (x, 0))
466 || queued_subexp_p (XEXP (x, 1)));
472 /* Perform all the pending incrementations. */
478 while ((p = pending_chain))
480 rtx body = QUEUED_BODY (p);
482 if (GET_CODE (body) == SEQUENCE)
484 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
485 emit_insn (QUEUED_BODY (p));
488 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
489 pending_chain = QUEUED_NEXT (p);
493 /* Copy data from FROM to TO, where the machine modes are not the same.
494 Both modes may be integer, or both may be floating.
495 UNSIGNEDP should be nonzero if FROM is an unsigned type.
496 This causes zero-extension instead of sign-extension. */
499 convert_move (to, from, unsignedp)
503 enum machine_mode to_mode = GET_MODE (to);
504 enum machine_mode from_mode = GET_MODE (from);
505 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
506 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
510 /* rtx code for making an equivalent value. */
511 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
513 to = protect_from_queue (to, 1);
514 from = protect_from_queue (from, 0);
516 if (to_real != from_real)
519 /* If FROM is a SUBREG that indicates that we have already done at least
520 the required extension, strip it. We don't handle such SUBREGs as
523 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
524 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
525 >= GET_MODE_SIZE (to_mode))
526 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
527 from = gen_lowpart (to_mode, from), from_mode = to_mode;
529 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
532 if (to_mode == from_mode
533 || (from_mode == VOIDmode && CONSTANT_P (from)))
535 emit_move_insn (to, from);
539 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
541 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
544 if (VECTOR_MODE_P (to_mode))
545 from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
547 to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
549 emit_move_insn (to, from);
553 if (to_real != from_real)
560 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
562 /* Try converting directly if the insn is supported. */
563 if ((code = can_extend_p (to_mode, from_mode, 0))
566 emit_unop_insn (code, to, from, UNKNOWN);
571 #ifdef HAVE_trunchfqf2
572 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
574 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
578 #ifdef HAVE_trunctqfqf2
579 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
581 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
585 #ifdef HAVE_truncsfqf2
586 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
588 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
592 #ifdef HAVE_truncdfqf2
593 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
595 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
599 #ifdef HAVE_truncxfqf2
600 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
602 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
606 #ifdef HAVE_trunctfqf2
607 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
609 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
614 #ifdef HAVE_trunctqfhf2
615 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
617 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
621 #ifdef HAVE_truncsfhf2
622 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
624 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
628 #ifdef HAVE_truncdfhf2
629 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
631 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
635 #ifdef HAVE_truncxfhf2
636 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
638 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
642 #ifdef HAVE_trunctfhf2
643 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
645 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
650 #ifdef HAVE_truncsftqf2
651 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
653 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
657 #ifdef HAVE_truncdftqf2
658 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
660 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
664 #ifdef HAVE_truncxftqf2
665 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
667 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
671 #ifdef HAVE_trunctftqf2
672 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
674 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
679 #ifdef HAVE_truncdfsf2
680 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
682 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
686 #ifdef HAVE_truncxfsf2
687 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
689 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
693 #ifdef HAVE_trunctfsf2
694 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
696 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
700 #ifdef HAVE_truncxfdf2
701 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
703 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
707 #ifdef HAVE_trunctfdf2
708 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
710 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
722 libcall = extendsfdf2_libfunc;
726 libcall = extendsfxf2_libfunc;
730 libcall = extendsftf2_libfunc;
742 libcall = truncdfsf2_libfunc;
746 libcall = extenddfxf2_libfunc;
750 libcall = extenddftf2_libfunc;
762 libcall = truncxfsf2_libfunc;
766 libcall = truncxfdf2_libfunc;
778 libcall = trunctfsf2_libfunc;
782 libcall = trunctfdf2_libfunc;
794 if (libcall == (rtx) 0)
795 /* This conversion is not implemented yet. */
799 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
801 insns = get_insns ();
803 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
808 /* Now both modes are integers. */
810 /* Handle expanding beyond a word. */
811 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
812 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
819 enum machine_mode lowpart_mode;
820 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
822 /* Try converting directly if the insn is supported. */
823 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
826 /* If FROM is a SUBREG, put it into a register. Do this
827 so that we always generate the same set of insns for
828 better cse'ing; if an intermediate assignment occurred,
829 we won't be doing the operation directly on the SUBREG. */
830 if (optimize > 0 && GET_CODE (from) == SUBREG)
831 from = force_reg (from_mode, from);
832 emit_unop_insn (code, to, from, equiv_code);
835 /* Next, try converting via full word. */
836 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
837 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
838 != CODE_FOR_nothing))
840 if (GET_CODE (to) == REG)
841 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
842 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
843 emit_unop_insn (code, to,
844 gen_lowpart (word_mode, to), equiv_code);
848 /* No special multiword conversion insn; do it by hand. */
851 /* Since we will turn this into a no conflict block, we must ensure
852 that the source does not overlap the target. */
854 if (reg_overlap_mentioned_p (to, from))
855 from = force_reg (from_mode, from);
857 /* Get a copy of FROM widened to a word, if necessary. */
858 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
859 lowpart_mode = word_mode;
861 lowpart_mode = from_mode;
863 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
865 lowpart = gen_lowpart (lowpart_mode, to);
866 emit_move_insn (lowpart, lowfrom);
868 /* Compute the value to put in each remaining word. */
870 fill_value = const0_rtx;
875 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
876 && STORE_FLAG_VALUE == -1)
878 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
880 fill_value = gen_reg_rtx (word_mode);
881 emit_insn (gen_slt (fill_value));
887 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
888 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
890 fill_value = convert_to_mode (word_mode, fill_value, 1);
894 /* Fill the remaining words. */
895 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
897 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
898 rtx subword = operand_subword (to, index, 1, to_mode);
903 if (fill_value != subword)
904 emit_move_insn (subword, fill_value);
907 insns = get_insns ();
910 emit_no_conflict_block (insns, to, from, NULL_RTX,
911 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
915 /* Truncating multi-word to a word or less. */
916 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
917 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
919 if (!((GET_CODE (from) == MEM
920 && ! MEM_VOLATILE_P (from)
921 && direct_load[(int) to_mode]
922 && ! mode_dependent_address_p (XEXP (from, 0)))
923 || GET_CODE (from) == REG
924 || GET_CODE (from) == SUBREG))
925 from = force_reg (from_mode, from);
926 convert_move (to, gen_lowpart (word_mode, from), 0);
930 /* Handle pointer conversion. */ /* SPEE 900220. */
931 if (to_mode == PQImode)
933 if (from_mode != QImode)
934 from = convert_to_mode (QImode, from, unsignedp);
936 #ifdef HAVE_truncqipqi2
937 if (HAVE_truncqipqi2)
939 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
942 #endif /* HAVE_truncqipqi2 */
946 if (from_mode == PQImode)
948 if (to_mode != QImode)
950 from = convert_to_mode (QImode, from, unsignedp);
955 #ifdef HAVE_extendpqiqi2
956 if (HAVE_extendpqiqi2)
958 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
961 #endif /* HAVE_extendpqiqi2 */
966 if (to_mode == PSImode)
968 if (from_mode != SImode)
969 from = convert_to_mode (SImode, from, unsignedp);
971 #ifdef HAVE_truncsipsi2
972 if (HAVE_truncsipsi2)
974 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
977 #endif /* HAVE_truncsipsi2 */
981 if (from_mode == PSImode)
983 if (to_mode != SImode)
985 from = convert_to_mode (SImode, from, unsignedp);
990 #ifdef HAVE_extendpsisi2
991 if (! unsignedp && HAVE_extendpsisi2)
993 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
996 #endif /* HAVE_extendpsisi2 */
997 #ifdef HAVE_zero_extendpsisi2
998 if (unsignedp && HAVE_zero_extendpsisi2)
1000 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1003 #endif /* HAVE_zero_extendpsisi2 */
1008 if (to_mode == PDImode)
1010 if (from_mode != DImode)
1011 from = convert_to_mode (DImode, from, unsignedp);
1013 #ifdef HAVE_truncdipdi2
1014 if (HAVE_truncdipdi2)
1016 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1019 #endif /* HAVE_truncdipdi2 */
1023 if (from_mode == PDImode)
1025 if (to_mode != DImode)
1027 from = convert_to_mode (DImode, from, unsignedp);
1032 #ifdef HAVE_extendpdidi2
1033 if (HAVE_extendpdidi2)
1035 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1038 #endif /* HAVE_extendpdidi2 */
1043 /* Now follow all the conversions between integers
1044 no more than a word long. */
1046 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1047 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1048 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1049 GET_MODE_BITSIZE (from_mode)))
1051 if (!((GET_CODE (from) == MEM
1052 && ! MEM_VOLATILE_P (from)
1053 && direct_load[(int) to_mode]
1054 && ! mode_dependent_address_p (XEXP (from, 0)))
1055 || GET_CODE (from) == REG
1056 || GET_CODE (from) == SUBREG))
1057 from = force_reg (from_mode, from);
1058 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1059 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1060 from = copy_to_reg (from);
1061 emit_move_insn (to, gen_lowpart (to_mode, from));
1065 /* Handle extension. */
1066 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1068 /* Convert directly if that works. */
1069 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1070 != CODE_FOR_nothing)
1072 emit_unop_insn (code, to, from, equiv_code);
1077 enum machine_mode intermediate;
1081 /* Search for a mode to convert via. */
1082 for (intermediate = from_mode; intermediate != VOIDmode;
1083 intermediate = GET_MODE_WIDER_MODE (intermediate))
1084 if (((can_extend_p (to_mode, intermediate, unsignedp)
1085 != CODE_FOR_nothing)
1086 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1087 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1088 GET_MODE_BITSIZE (intermediate))))
1089 && (can_extend_p (intermediate, from_mode, unsignedp)
1090 != CODE_FOR_nothing))
1092 convert_move (to, convert_to_mode (intermediate, from,
1093 unsignedp), unsignedp);
1097 /* No suitable intermediate mode.
1098 Generate what we need with shifts. */
1099 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1100 - GET_MODE_BITSIZE (from_mode), 0);
1101 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1102 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1104 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1107 emit_move_insn (to, tmp);
1112 /* Support special truncate insns for certain modes. */
1114 if (from_mode == DImode && to_mode == SImode)
1116 #ifdef HAVE_truncdisi2
1117 if (HAVE_truncdisi2)
1119 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1123 convert_move (to, force_reg (from_mode, from), unsignedp);
1127 if (from_mode == DImode && to_mode == HImode)
1129 #ifdef HAVE_truncdihi2
1130 if (HAVE_truncdihi2)
1132 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1136 convert_move (to, force_reg (from_mode, from), unsignedp);
1140 if (from_mode == DImode && to_mode == QImode)
1142 #ifdef HAVE_truncdiqi2
1143 if (HAVE_truncdiqi2)
1145 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1149 convert_move (to, force_reg (from_mode, from), unsignedp);
1153 if (from_mode == SImode && to_mode == HImode)
1155 #ifdef HAVE_truncsihi2
1156 if (HAVE_truncsihi2)
1158 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1162 convert_move (to, force_reg (from_mode, from), unsignedp);
1166 if (from_mode == SImode && to_mode == QImode)
1168 #ifdef HAVE_truncsiqi2
1169 if (HAVE_truncsiqi2)
1171 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1175 convert_move (to, force_reg (from_mode, from), unsignedp);
1179 if (from_mode == HImode && to_mode == QImode)
1181 #ifdef HAVE_trunchiqi2
1182 if (HAVE_trunchiqi2)
1184 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1188 convert_move (to, force_reg (from_mode, from), unsignedp);
1192 if (from_mode == TImode && to_mode == DImode)
1194 #ifdef HAVE_trunctidi2
1195 if (HAVE_trunctidi2)
1197 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1201 convert_move (to, force_reg (from_mode, from), unsignedp);
1205 if (from_mode == TImode && to_mode == SImode)
1207 #ifdef HAVE_trunctisi2
1208 if (HAVE_trunctisi2)
1210 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1214 convert_move (to, force_reg (from_mode, from), unsignedp);
1218 if (from_mode == TImode && to_mode == HImode)
1220 #ifdef HAVE_trunctihi2
1221 if (HAVE_trunctihi2)
1223 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1227 convert_move (to, force_reg (from_mode, from), unsignedp);
1231 if (from_mode == TImode && to_mode == QImode)
1233 #ifdef HAVE_trunctiqi2
1234 if (HAVE_trunctiqi2)
1236 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1240 convert_move (to, force_reg (from_mode, from), unsignedp);
1244 /* Handle truncation of volatile memrefs, and so on;
1245 the things that couldn't be truncated directly,
1246 and for which there was no special instruction. */
1247 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1249 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1250 emit_move_insn (to, temp);
1254 /* Mode combination is not recognized. */
1258 /* Return an rtx for a value that would result
1259 from converting X to mode MODE.
1260 Both X and MODE may be floating, or both integer.
1261 UNSIGNEDP is nonzero if X is an unsigned value.
1262 This can be done by referring to a part of X in place
1263 or by copying to a new temporary with conversion.
1265 This function *must not* call protect_from_queue
1266 except when putting X into an insn (in which case convert_move does it). */
1269 convert_to_mode (mode, x, unsignedp)
1270 enum machine_mode mode;
1274 return convert_modes (mode, VOIDmode, x, unsignedp);
1277 /* Return an rtx for a value that would result
1278 from converting X from mode OLDMODE to mode MODE.
1279 Both modes may be floating, or both integer.
1280 UNSIGNEDP is nonzero if X is an unsigned value.
1282 This can be done by referring to a part of X in place
1283 or by copying to a new temporary with conversion.
1285 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1287 This function *must not* call protect_from_queue
1288 except when putting X into an insn (in which case convert_move does it). */
1291 convert_modes (mode, oldmode, x, unsignedp)
1292 enum machine_mode mode, oldmode;
1298 /* If FROM is a SUBREG that indicates that we have already done at least
1299 the required extension, strip it. */
1301 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1302 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1303 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1304 x = gen_lowpart (mode, x);
1306 if (GET_MODE (x) != VOIDmode)
1307 oldmode = GET_MODE (x);
1309 if (mode == oldmode)
1312 /* There is one case that we must handle specially: If we are converting
1313 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1314 we are to interpret the constant as unsigned, gen_lowpart will do
1315 the wrong if the constant appears negative. What we want to do is
1316 make the high-order word of the constant zero, not all ones. */
1318 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1319 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1320 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1322 HOST_WIDE_INT val = INTVAL (x);
1324 if (oldmode != VOIDmode
1325 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1327 int width = GET_MODE_BITSIZE (oldmode);
1329 /* We need to zero extend VAL. */
1330 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1333 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1336 /* We can do this with a gen_lowpart if both desired and current modes
1337 are integer, and this is either a constant integer, a register, or a
1338 non-volatile MEM. Except for the constant case where MODE is no
1339 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1341 if ((GET_CODE (x) == CONST_INT
1342 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1343 || (GET_MODE_CLASS (mode) == MODE_INT
1344 && GET_MODE_CLASS (oldmode) == MODE_INT
1345 && (GET_CODE (x) == CONST_DOUBLE
1346 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1347 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1348 && direct_load[(int) mode])
1349 || (GET_CODE (x) == REG
1350 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1351 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1353 /* ?? If we don't know OLDMODE, we have to assume here that
1354 X does not need sign- or zero-extension. This may not be
1355 the case, but it's the best we can do. */
1356 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1357 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1359 HOST_WIDE_INT val = INTVAL (x);
1360 int width = GET_MODE_BITSIZE (oldmode);
1362 /* We must sign or zero-extend in this case. Start by
1363 zero-extending, then sign extend if we need to. */
1364 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1366 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1367 val |= (HOST_WIDE_INT) (-1) << width;
1369 return GEN_INT (trunc_int_for_mode (val, mode));
1372 return gen_lowpart (mode, x);
1375 temp = gen_reg_rtx (mode);
1376 convert_move (temp, x, unsignedp);
1380 /* This macro is used to determine what the largest unit size that
1381 move_by_pieces can use is. */
1383 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1384 move efficiently, as opposed to MOVE_MAX which is the maximum
1385 number of bytes we can move with a single instruction. */
1387 #ifndef MOVE_MAX_PIECES
1388 #define MOVE_MAX_PIECES MOVE_MAX
1391 /* Generate several move instructions to copy LEN bytes from block FROM to
1392 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1393 and TO through protect_from_queue before calling.
1395 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1396 used to push FROM to the stack.
1398 ALIGN is maximum alignment we can assume. */
1401 move_by_pieces (to, from, len, align)
1403 unsigned HOST_WIDE_INT len;
1406 struct move_by_pieces data;
1407 rtx to_addr, from_addr = XEXP (from, 0);
1408 unsigned int max_size = MOVE_MAX_PIECES + 1;
1409 enum machine_mode mode = VOIDmode, tmode;
1410 enum insn_code icode;
1413 data.from_addr = from_addr;
1416 to_addr = XEXP (to, 0);
1419 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1420 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1429 #ifdef STACK_GROWS_DOWNWARD
1435 data.to_addr = to_addr;
1438 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1439 || GET_CODE (from_addr) == POST_INC
1440 || GET_CODE (from_addr) == POST_DEC);
1442 data.explicit_inc_from = 0;
1443 data.explicit_inc_to = 0;
1444 if (data.reverse) data.offset = len;
1447 /* If copying requires more than two move insns,
1448 copy addresses to registers (to make displacements shorter)
1449 and use post-increment if available. */
1450 if (!(data.autinc_from && data.autinc_to)
1451 && move_by_pieces_ninsns (len, align) > 2)
1453 /* Find the mode of the largest move... */
1454 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1455 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1456 if (GET_MODE_SIZE (tmode) < max_size)
1459 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1461 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1462 data.autinc_from = 1;
1463 data.explicit_inc_from = -1;
1465 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1467 data.from_addr = copy_addr_to_reg (from_addr);
1468 data.autinc_from = 1;
1469 data.explicit_inc_from = 1;
1471 if (!data.autinc_from && CONSTANT_P (from_addr))
1472 data.from_addr = copy_addr_to_reg (from_addr);
1473 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1475 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1477 data.explicit_inc_to = -1;
1479 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1481 data.to_addr = copy_addr_to_reg (to_addr);
1483 data.explicit_inc_to = 1;
1485 if (!data.autinc_to && CONSTANT_P (to_addr))
1486 data.to_addr = copy_addr_to_reg (to_addr);
1489 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1490 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1491 align = MOVE_MAX * BITS_PER_UNIT;
1493 /* First move what we can in the largest integer mode, then go to
1494 successively smaller modes. */
1496 while (max_size > 1)
1498 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1499 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1500 if (GET_MODE_SIZE (tmode) < max_size)
1503 if (mode == VOIDmode)
1506 icode = mov_optab->handlers[(int) mode].insn_code;
1507 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1508 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1510 max_size = GET_MODE_SIZE (mode);
1513 /* The code above should have handled everything. */
1518 /* Return number of insns required to move L bytes by pieces.
1519 ALIGN (in bits) is maximum alignment we can assume. */
1521 static unsigned HOST_WIDE_INT
1522 move_by_pieces_ninsns (l, align)
1523 unsigned HOST_WIDE_INT l;
1526 unsigned HOST_WIDE_INT n_insns = 0;
1527 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1529 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1530 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1531 align = MOVE_MAX * BITS_PER_UNIT;
1533 while (max_size > 1)
1535 enum machine_mode mode = VOIDmode, tmode;
1536 enum insn_code icode;
1538 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1539 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1540 if (GET_MODE_SIZE (tmode) < max_size)
1543 if (mode == VOIDmode)
1546 icode = mov_optab->handlers[(int) mode].insn_code;
1547 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1548 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1550 max_size = GET_MODE_SIZE (mode);
1558 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1559 with move instructions for mode MODE. GENFUN is the gen_... function
1560 to make a move insn for that mode. DATA has all the other info. */
1563 move_by_pieces_1 (genfun, mode, data)
1564 rtx (*genfun) PARAMS ((rtx, ...));
1565 enum machine_mode mode;
1566 struct move_by_pieces *data;
1568 unsigned int size = GET_MODE_SIZE (mode);
1569 rtx to1 = NULL_RTX, from1;
1571 while (data->len >= size)
1574 data->offset -= size;
1578 if (data->autinc_to)
1579 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1582 to1 = adjust_address (data->to, mode, data->offset);
1585 if (data->autinc_from)
1586 from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1589 from1 = adjust_address (data->from, mode, data->offset);
1591 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1592 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1593 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1594 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1597 emit_insn ((*genfun) (to1, from1));
1600 #ifdef PUSH_ROUNDING
1601 emit_single_push_insn (mode, from1, NULL);
1607 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1608 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1609 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1610 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1612 if (! data->reverse)
1613 data->offset += size;
1619 /* Emit code to move a block Y to a block X.
1620 This may be done with string-move instructions,
1621 with multiple scalar move instructions, or with a library call.
1623 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1625 SIZE is an rtx that says how long they are.
1626 ALIGN is the maximum alignment we can assume they have.
1628 Return the address of the new block, if memcpy is called and returns it,
1632 emit_block_move (x, y, size)
1637 #ifdef TARGET_MEM_FUNCTIONS
1639 tree call_expr, arg_list;
1641 unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1643 if (GET_MODE (x) != BLKmode)
1646 if (GET_MODE (y) != BLKmode)
1649 x = protect_from_queue (x, 1);
1650 y = protect_from_queue (y, 0);
1651 size = protect_from_queue (size, 0);
1653 if (GET_CODE (x) != MEM)
1655 if (GET_CODE (y) != MEM)
1660 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1661 move_by_pieces (x, y, INTVAL (size), align);
1664 /* Try the most limited insn first, because there's no point
1665 including more than one in the machine description unless
1666 the more limited one has some advantage. */
1668 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1669 enum machine_mode mode;
1671 /* Since this is a move insn, we don't care about volatility. */
1674 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1675 mode = GET_MODE_WIDER_MODE (mode))
1677 enum insn_code code = movstr_optab[(int) mode];
1678 insn_operand_predicate_fn pred;
1680 if (code != CODE_FOR_nothing
1681 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1682 here because if SIZE is less than the mode mask, as it is
1683 returned by the macro, it will definitely be less than the
1684 actual mode mask. */
1685 && ((GET_CODE (size) == CONST_INT
1686 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1687 <= (GET_MODE_MASK (mode) >> 1)))
1688 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1689 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1690 || (*pred) (x, BLKmode))
1691 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1692 || (*pred) (y, BLKmode))
1693 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1694 || (*pred) (opalign, VOIDmode)))
1697 rtx last = get_last_insn ();
1700 op2 = convert_to_mode (mode, size, 1);
1701 pred = insn_data[(int) code].operand[2].predicate;
1702 if (pred != 0 && ! (*pred) (op2, mode))
1703 op2 = copy_to_mode_reg (mode, op2);
1705 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1713 delete_insns_since (last);
1719 /* X, Y, or SIZE may have been passed through protect_from_queue.
1721 It is unsafe to save the value generated by protect_from_queue
1722 and reuse it later. Consider what happens if emit_queue is
1723 called before the return value from protect_from_queue is used.
1725 Expansion of the CALL_EXPR below will call emit_queue before
1726 we are finished emitting RTL for argument setup. So if we are
1727 not careful we could get the wrong value for an argument.
1729 To avoid this problem we go ahead and emit code to copy X, Y &
1730 SIZE into new pseudos. We can then place those new pseudos
1731 into an RTL_EXPR and use them later, even after a call to
1734 Note this is not strictly needed for library calls since they
1735 do not call emit_queue before loading their arguments. However,
1736 we may need to have library calls call emit_queue in the future
1737 since failing to do so could cause problems for targets which
1738 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1739 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1740 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1742 #ifdef TARGET_MEM_FUNCTIONS
1743 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1745 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1746 TREE_UNSIGNED (integer_type_node));
1747 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1750 #ifdef TARGET_MEM_FUNCTIONS
1751 /* It is incorrect to use the libcall calling conventions to call
1752 memcpy in this context.
1754 This could be a user call to memcpy and the user may wish to
1755 examine the return value from memcpy.
1757 For targets where libcalls and normal calls have different conventions
1758 for returning pointers, we could end up generating incorrect code.
1760 So instead of using a libcall sequence we build up a suitable
1761 CALL_EXPR and expand the call in the normal fashion. */
1762 if (fn == NULL_TREE)
1766 /* This was copied from except.c, I don't know if all this is
1767 necessary in this context or not. */
1768 fn = get_identifier ("memcpy");
1769 fntype = build_pointer_type (void_type_node);
1770 fntype = build_function_type (fntype, NULL_TREE);
1771 fn = build_decl (FUNCTION_DECL, fn, fntype);
1772 ggc_add_tree_root (&fn, 1);
1773 DECL_EXTERNAL (fn) = 1;
1774 TREE_PUBLIC (fn) = 1;
1775 DECL_ARTIFICIAL (fn) = 1;
1776 TREE_NOTHROW (fn) = 1;
1777 make_decl_rtl (fn, NULL);
1778 assemble_external (fn);
1781 /* We need to make an argument list for the function call.
1783 memcpy has three arguments, the first two are void * addresses and
1784 the last is a size_t byte count for the copy. */
1786 = build_tree_list (NULL_TREE,
1787 make_tree (build_pointer_type (void_type_node), x));
1788 TREE_CHAIN (arg_list)
1789 = build_tree_list (NULL_TREE,
1790 make_tree (build_pointer_type (void_type_node), y));
1791 TREE_CHAIN (TREE_CHAIN (arg_list))
1792 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1793 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1795 /* Now we have to build up the CALL_EXPR itself. */
1796 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1797 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1798 call_expr, arg_list, NULL_TREE);
1799 TREE_SIDE_EFFECTS (call_expr) = 1;
1801 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1803 emit_library_call (bcopy_libfunc, LCT_NORMAL,
1804 VOIDmode, 3, y, Pmode, x, Pmode,
1805 convert_to_mode (TYPE_MODE (integer_type_node), size,
1806 TREE_UNSIGNED (integer_type_node)),
1807 TYPE_MODE (integer_type_node));
1810 /* If we are initializing a readonly value, show the above call
1811 clobbered it. Otherwise, a load from it may erroneously be hoisted
1813 if (RTX_UNCHANGING_P (x))
1814 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
1820 /* Copy all or part of a value X into registers starting at REGNO.
1821 The number of registers to be filled is NREGS. */
1824 move_block_to_reg (regno, x, nregs, mode)
1828 enum machine_mode mode;
1831 #ifdef HAVE_load_multiple
1839 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1840 x = validize_mem (force_const_mem (mode, x));
1842 /* See if the machine can do this with a load multiple insn. */
1843 #ifdef HAVE_load_multiple
1844 if (HAVE_load_multiple)
1846 last = get_last_insn ();
1847 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1855 delete_insns_since (last);
1859 for (i = 0; i < nregs; i++)
1860 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1861 operand_subword_force (x, i, mode));
1864 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1865 The number of registers to be filled is NREGS. SIZE indicates the number
1866 of bytes in the object X. */
1869 move_block_from_reg (regno, x, nregs, size)
1876 #ifdef HAVE_store_multiple
1880 enum machine_mode mode;
1885 /* If SIZE is that of a mode no bigger than a word, just use that
1886 mode's store operation. */
1887 if (size <= UNITS_PER_WORD
1888 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1890 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1894 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1895 to the left before storing to memory. Note that the previous test
1896 doesn't handle all cases (e.g. SIZE == 3). */
1897 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1899 rtx tem = operand_subword (x, 0, 1, BLKmode);
1905 shift = expand_shift (LSHIFT_EXPR, word_mode,
1906 gen_rtx_REG (word_mode, regno),
1907 build_int_2 ((UNITS_PER_WORD - size)
1908 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1909 emit_move_insn (tem, shift);
1913 /* See if the machine can do this with a store multiple insn. */
1914 #ifdef HAVE_store_multiple
1915 if (HAVE_store_multiple)
1917 last = get_last_insn ();
1918 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1926 delete_insns_since (last);
1930 for (i = 0; i < nregs; i++)
1932 rtx tem = operand_subword (x, i, 1, BLKmode);
1937 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1941 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1942 registers represented by a PARALLEL. SSIZE represents the total size of
1943 block SRC in bytes, or -1 if not known. */
1944 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1945 the balance will be in what would be the low-order memory addresses, i.e.
1946 left justified for big endian, right justified for little endian. This
1947 happens to be true for the targets currently using this support. If this
1948 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1952 emit_group_load (dst, orig_src, ssize)
1959 if (GET_CODE (dst) != PARALLEL)
1962 /* Check for a NULL entry, used to indicate that the parameter goes
1963 both on the stack and in registers. */
1964 if (XEXP (XVECEXP (dst, 0, 0), 0))
1969 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1971 /* Process the pieces. */
1972 for (i = start; i < XVECLEN (dst, 0); i++)
1974 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1975 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1976 unsigned int bytelen = GET_MODE_SIZE (mode);
1979 /* Handle trailing fragments that run over the size of the struct. */
1980 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1982 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1983 bytelen = ssize - bytepos;
1988 /* If we won't be loading directly from memory, protect the real source
1989 from strange tricks we might play; but make sure that the source can
1990 be loaded directly into the destination. */
1992 if (GET_CODE (orig_src) != MEM
1993 && (!CONSTANT_P (orig_src)
1994 || (GET_MODE (orig_src) != mode
1995 && GET_MODE (orig_src) != VOIDmode)))
1997 if (GET_MODE (orig_src) == VOIDmode)
1998 src = gen_reg_rtx (mode);
2000 src = gen_reg_rtx (GET_MODE (orig_src));
2002 emit_move_insn (src, orig_src);
2005 /* Optimize the access just a bit. */
2006 if (GET_CODE (src) == MEM
2007 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2008 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2009 && bytelen == GET_MODE_SIZE (mode))
2011 tmps[i] = gen_reg_rtx (mode);
2012 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2014 else if (GET_CODE (src) == CONCAT)
2017 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2018 tmps[i] = XEXP (src, 0);
2019 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2020 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2021 tmps[i] = XEXP (src, 1);
2022 else if (bytepos == 0)
2024 rtx mem = assign_stack_temp (GET_MODE (src),
2025 GET_MODE_SIZE (GET_MODE (src)), 0);
2026 emit_move_insn (mem, src);
2027 tmps[i] = adjust_address (mem, mode, 0);
2032 else if (CONSTANT_P (src)
2033 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2036 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2037 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2040 if (BYTES_BIG_ENDIAN && shift)
2041 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2042 tmps[i], 0, OPTAB_WIDEN);
2047 /* Copy the extracted pieces into the proper (probable) hard regs. */
2048 for (i = start; i < XVECLEN (dst, 0); i++)
2049 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2052 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2053 registers represented by a PARALLEL. SSIZE represents the total size of
2054 block DST, or -1 if not known. */
2057 emit_group_store (orig_dst, src, ssize)
2064 if (GET_CODE (src) != PARALLEL)
2067 /* Check for a NULL entry, used to indicate that the parameter goes
2068 both on the stack and in registers. */
2069 if (XEXP (XVECEXP (src, 0, 0), 0))
2074 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2076 /* Copy the (probable) hard regs into pseudos. */
2077 for (i = start; i < XVECLEN (src, 0); i++)
2079 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2080 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2081 emit_move_insn (tmps[i], reg);
2085 /* If we won't be storing directly into memory, protect the real destination
2086 from strange tricks we might play. */
2088 if (GET_CODE (dst) == PARALLEL)
2092 /* We can get a PARALLEL dst if there is a conditional expression in
2093 a return statement. In that case, the dst and src are the same,
2094 so no action is necessary. */
2095 if (rtx_equal_p (dst, src))
2098 /* It is unclear if we can ever reach here, but we may as well handle
2099 it. Allocate a temporary, and split this into a store/load to/from
2102 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2103 emit_group_store (temp, src, ssize);
2104 emit_group_load (dst, temp, ssize);
2107 else if (GET_CODE (dst) != MEM)
2109 dst = gen_reg_rtx (GET_MODE (orig_dst));
2110 /* Make life a bit easier for combine. */
2111 emit_move_insn (dst, const0_rtx);
2114 /* Process the pieces. */
2115 for (i = start; i < XVECLEN (src, 0); i++)
2117 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2118 enum machine_mode mode = GET_MODE (tmps[i]);
2119 unsigned int bytelen = GET_MODE_SIZE (mode);
2121 /* Handle trailing fragments that run over the size of the struct. */
2122 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2124 if (BYTES_BIG_ENDIAN)
2126 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2127 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2128 tmps[i], 0, OPTAB_WIDEN);
2130 bytelen = ssize - bytepos;
2133 /* Optimize the access just a bit. */
2134 if (GET_CODE (dst) == MEM
2135 && MEM_ALIGN (dst) >= GET_MODE_ALIGNMENT (mode)
2136 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2137 && bytelen == GET_MODE_SIZE (mode))
2138 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2140 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2141 mode, tmps[i], ssize);
2146 /* Copy from the pseudo into the (probable) hard reg. */
2147 if (GET_CODE (dst) == REG)
2148 emit_move_insn (orig_dst, dst);
2151 /* Generate code to copy a BLKmode object of TYPE out of a
2152 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2153 is null, a stack temporary is created. TGTBLK is returned.
2155 The primary purpose of this routine is to handle functions
2156 that return BLKmode structures in registers. Some machines
2157 (the PA for example) want to return all small structures
2158 in registers regardless of the structure's alignment. */
2161 copy_blkmode_from_reg (tgtblk, srcreg, type)
2166 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2167 rtx src = NULL, dst = NULL;
2168 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2169 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2173 tgtblk = assign_temp (build_qualified_type (type,
2175 | TYPE_QUAL_CONST)),
2177 preserve_temp_slots (tgtblk);
2180 /* This code assumes srcreg is at least a full word. If it isn't,
2181 copy it into a new pseudo which is a full word. */
2182 if (GET_MODE (srcreg) != BLKmode
2183 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2184 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2186 /* Structures whose size is not a multiple of a word are aligned
2187 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2188 machine, this means we must skip the empty high order bytes when
2189 calculating the bit offset. */
2190 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2191 big_endian_correction
2192 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2194 /* Copy the structure BITSIZE bites at a time.
2196 We could probably emit more efficient code for machines which do not use
2197 strict alignment, but it doesn't seem worth the effort at the current
2199 for (bitpos = 0, xbitpos = big_endian_correction;
2200 bitpos < bytes * BITS_PER_UNIT;
2201 bitpos += bitsize, xbitpos += bitsize)
2203 /* We need a new source operand each time xbitpos is on a
2204 word boundary and when xbitpos == big_endian_correction
2205 (the first time through). */
2206 if (xbitpos % BITS_PER_WORD == 0
2207 || xbitpos == big_endian_correction)
2208 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2211 /* We need a new destination operand each time bitpos is on
2213 if (bitpos % BITS_PER_WORD == 0)
2214 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2216 /* Use xbitpos for the source extraction (right justified) and
2217 xbitpos for the destination store (left justified). */
2218 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2219 extract_bit_field (src, bitsize,
2220 xbitpos % BITS_PER_WORD, 1,
2221 NULL_RTX, word_mode, word_mode,
2229 /* Add a USE expression for REG to the (possibly empty) list pointed
2230 to by CALL_FUSAGE. REG must denote a hard register. */
2233 use_reg (call_fusage, reg)
2234 rtx *call_fusage, reg;
2236 if (GET_CODE (reg) != REG
2237 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2241 = gen_rtx_EXPR_LIST (VOIDmode,
2242 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2245 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2246 starting at REGNO. All of these registers must be hard registers. */
2249 use_regs (call_fusage, regno, nregs)
2256 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2259 for (i = 0; i < nregs; i++)
2260 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2263 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2264 PARALLEL REGS. This is for calls that pass values in multiple
2265 non-contiguous locations. The Irix 6 ABI has examples of this. */
2268 use_group_regs (call_fusage, regs)
2274 for (i = 0; i < XVECLEN (regs, 0); i++)
2276 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2278 /* A NULL entry means the parameter goes both on the stack and in
2279 registers. This can also be a MEM for targets that pass values
2280 partially on the stack and partially in registers. */
2281 if (reg != 0 && GET_CODE (reg) == REG)
2282 use_reg (call_fusage, reg);
2288 can_store_by_pieces (len, constfun, constfundata, align)
2289 unsigned HOST_WIDE_INT len;
2290 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2294 unsigned HOST_WIDE_INT max_size, l;
2295 HOST_WIDE_INT offset = 0;
2296 enum machine_mode mode, tmode;
2297 enum insn_code icode;
2301 if (! MOVE_BY_PIECES_P (len, align))
2304 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2305 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2306 align = MOVE_MAX * BITS_PER_UNIT;
2308 /* We would first store what we can in the largest integer mode, then go to
2309 successively smaller modes. */
2312 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2317 max_size = MOVE_MAX_PIECES + 1;
2318 while (max_size > 1)
2320 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2321 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2322 if (GET_MODE_SIZE (tmode) < max_size)
2325 if (mode == VOIDmode)
2328 icode = mov_optab->handlers[(int) mode].insn_code;
2329 if (icode != CODE_FOR_nothing
2330 && align >= GET_MODE_ALIGNMENT (mode))
2332 unsigned int size = GET_MODE_SIZE (mode);
2339 cst = (*constfun) (constfundata, offset, mode);
2340 if (!LEGITIMATE_CONSTANT_P (cst))
2350 max_size = GET_MODE_SIZE (mode);
2353 /* The code above should have handled everything. */
2361 /* Generate several move instructions to store LEN bytes generated by
2362 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2363 pointer which will be passed as argument in every CONSTFUN call.
2364 ALIGN is maximum alignment we can assume. */
2367 store_by_pieces (to, len, constfun, constfundata, align)
2369 unsigned HOST_WIDE_INT len;
2370 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2374 struct store_by_pieces data;
2376 if (! MOVE_BY_PIECES_P (len, align))
2378 to = protect_from_queue (to, 1);
2379 data.constfun = constfun;
2380 data.constfundata = constfundata;
2383 store_by_pieces_1 (&data, align);
2386 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2387 rtx with BLKmode). The caller must pass TO through protect_from_queue
2388 before calling. ALIGN is maximum alignment we can assume. */
2391 clear_by_pieces (to, len, align)
2393 unsigned HOST_WIDE_INT len;
2396 struct store_by_pieces data;
2398 data.constfun = clear_by_pieces_1;
2399 data.constfundata = NULL;
2402 store_by_pieces_1 (&data, align);
2405 /* Callback routine for clear_by_pieces.
2406 Return const0_rtx unconditionally. */
2409 clear_by_pieces_1 (data, offset, mode)
2410 PTR data ATTRIBUTE_UNUSED;
2411 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2412 enum machine_mode mode ATTRIBUTE_UNUSED;
2417 /* Subroutine of clear_by_pieces and store_by_pieces.
2418 Generate several move instructions to store LEN bytes of block TO. (A MEM
2419 rtx with BLKmode). The caller must pass TO through protect_from_queue
2420 before calling. ALIGN is maximum alignment we can assume. */
2423 store_by_pieces_1 (data, align)
2424 struct store_by_pieces *data;
2427 rtx to_addr = XEXP (data->to, 0);
2428 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2429 enum machine_mode mode = VOIDmode, tmode;
2430 enum insn_code icode;
2433 data->to_addr = to_addr;
2435 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2436 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2438 data->explicit_inc_to = 0;
2440 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2442 data->offset = data->len;
2444 /* If storing requires more than two move insns,
2445 copy addresses to registers (to make displacements shorter)
2446 and use post-increment if available. */
2447 if (!data->autinc_to
2448 && move_by_pieces_ninsns (data->len, align) > 2)
2450 /* Determine the main mode we'll be using. */
2451 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2452 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2453 if (GET_MODE_SIZE (tmode) < max_size)
2456 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2458 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2459 data->autinc_to = 1;
2460 data->explicit_inc_to = -1;
2463 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2464 && ! data->autinc_to)
2466 data->to_addr = copy_addr_to_reg (to_addr);
2467 data->autinc_to = 1;
2468 data->explicit_inc_to = 1;
2471 if ( !data->autinc_to && CONSTANT_P (to_addr))
2472 data->to_addr = copy_addr_to_reg (to_addr);
2475 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2476 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2477 align = MOVE_MAX * BITS_PER_UNIT;
2479 /* First store what we can in the largest integer mode, then go to
2480 successively smaller modes. */
2482 while (max_size > 1)
2484 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2485 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2486 if (GET_MODE_SIZE (tmode) < max_size)
2489 if (mode == VOIDmode)
2492 icode = mov_optab->handlers[(int) mode].insn_code;
2493 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2494 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2496 max_size = GET_MODE_SIZE (mode);
2499 /* The code above should have handled everything. */
2504 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2505 with move instructions for mode MODE. GENFUN is the gen_... function
2506 to make a move insn for that mode. DATA has all the other info. */
2509 store_by_pieces_2 (genfun, mode, data)
2510 rtx (*genfun) PARAMS ((rtx, ...));
2511 enum machine_mode mode;
2512 struct store_by_pieces *data;
2514 unsigned int size = GET_MODE_SIZE (mode);
2517 while (data->len >= size)
2520 data->offset -= size;
2522 if (data->autinc_to)
2523 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2526 to1 = adjust_address (data->to, mode, data->offset);
2528 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2529 emit_insn (gen_add2_insn (data->to_addr,
2530 GEN_INT (-(HOST_WIDE_INT) size)));
2532 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2533 emit_insn ((*genfun) (to1, cst));
2535 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2536 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2538 if (! data->reverse)
2539 data->offset += size;
2545 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2546 its length in bytes. */
2549 clear_storage (object, size)
2553 #ifdef TARGET_MEM_FUNCTIONS
2555 tree call_expr, arg_list;
2558 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2559 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2561 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2562 just move a zero. Otherwise, do this a piece at a time. */
2563 if (GET_MODE (object) != BLKmode
2564 && GET_CODE (size) == CONST_INT
2565 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2566 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2569 object = protect_from_queue (object, 1);
2570 size = protect_from_queue (size, 0);
2572 if (GET_CODE (size) == CONST_INT
2573 && MOVE_BY_PIECES_P (INTVAL (size), align))
2574 clear_by_pieces (object, INTVAL (size), align);
2577 /* Try the most limited insn first, because there's no point
2578 including more than one in the machine description unless
2579 the more limited one has some advantage. */
2581 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2582 enum machine_mode mode;
2584 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2585 mode = GET_MODE_WIDER_MODE (mode))
2587 enum insn_code code = clrstr_optab[(int) mode];
2588 insn_operand_predicate_fn pred;
2590 if (code != CODE_FOR_nothing
2591 /* We don't need MODE to be narrower than
2592 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2593 the mode mask, as it is returned by the macro, it will
2594 definitely be less than the actual mode mask. */
2595 && ((GET_CODE (size) == CONST_INT
2596 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2597 <= (GET_MODE_MASK (mode) >> 1)))
2598 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2599 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2600 || (*pred) (object, BLKmode))
2601 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2602 || (*pred) (opalign, VOIDmode)))
2605 rtx last = get_last_insn ();
2608 op1 = convert_to_mode (mode, size, 1);
2609 pred = insn_data[(int) code].operand[1].predicate;
2610 if (pred != 0 && ! (*pred) (op1, mode))
2611 op1 = copy_to_mode_reg (mode, op1);
2613 pat = GEN_FCN ((int) code) (object, op1, opalign);
2620 delete_insns_since (last);
2624 /* OBJECT or SIZE may have been passed through protect_from_queue.
2626 It is unsafe to save the value generated by protect_from_queue
2627 and reuse it later. Consider what happens if emit_queue is
2628 called before the return value from protect_from_queue is used.
2630 Expansion of the CALL_EXPR below will call emit_queue before
2631 we are finished emitting RTL for argument setup. So if we are
2632 not careful we could get the wrong value for an argument.
2634 To avoid this problem we go ahead and emit code to copy OBJECT
2635 and SIZE into new pseudos. We can then place those new pseudos
2636 into an RTL_EXPR and use them later, even after a call to
2639 Note this is not strictly needed for library calls since they
2640 do not call emit_queue before loading their arguments. However,
2641 we may need to have library calls call emit_queue in the future
2642 since failing to do so could cause problems for targets which
2643 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2644 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2646 #ifdef TARGET_MEM_FUNCTIONS
2647 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2649 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2650 TREE_UNSIGNED (integer_type_node));
2651 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2654 #ifdef TARGET_MEM_FUNCTIONS
2655 /* It is incorrect to use the libcall calling conventions to call
2656 memset in this context.
2658 This could be a user call to memset and the user may wish to
2659 examine the return value from memset.
2661 For targets where libcalls and normal calls have different
2662 conventions for returning pointers, we could end up generating
2665 So instead of using a libcall sequence we build up a suitable
2666 CALL_EXPR and expand the call in the normal fashion. */
2667 if (fn == NULL_TREE)
2671 /* This was copied from except.c, I don't know if all this is
2672 necessary in this context or not. */
2673 fn = get_identifier ("memset");
2674 fntype = build_pointer_type (void_type_node);
2675 fntype = build_function_type (fntype, NULL_TREE);
2676 fn = build_decl (FUNCTION_DECL, fn, fntype);
2677 ggc_add_tree_root (&fn, 1);
2678 DECL_EXTERNAL (fn) = 1;
2679 TREE_PUBLIC (fn) = 1;
2680 DECL_ARTIFICIAL (fn) = 1;
2681 TREE_NOTHROW (fn) = 1;
2682 make_decl_rtl (fn, NULL);
2683 assemble_external (fn);
2686 /* We need to make an argument list for the function call.
2688 memset has three arguments, the first is a void * addresses, the
2689 second an integer with the initialization value, the last is a
2690 size_t byte count for the copy. */
2692 = build_tree_list (NULL_TREE,
2693 make_tree (build_pointer_type (void_type_node),
2695 TREE_CHAIN (arg_list)
2696 = build_tree_list (NULL_TREE,
2697 make_tree (integer_type_node, const0_rtx));
2698 TREE_CHAIN (TREE_CHAIN (arg_list))
2699 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2700 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2702 /* Now we have to build up the CALL_EXPR itself. */
2703 call_expr = build1 (ADDR_EXPR,
2704 build_pointer_type (TREE_TYPE (fn)), fn);
2705 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2706 call_expr, arg_list, NULL_TREE);
2707 TREE_SIDE_EFFECTS (call_expr) = 1;
2709 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2711 emit_library_call (bzero_libfunc, LCT_NORMAL,
2712 VOIDmode, 2, object, Pmode, size,
2713 TYPE_MODE (integer_type_node));
2716 /* If we are initializing a readonly value, show the above call
2717 clobbered it. Otherwise, a load from it may erroneously be
2718 hoisted from a loop. */
2719 if (RTX_UNCHANGING_P (object))
2720 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2727 /* Generate code to copy Y into X.
2728 Both Y and X must have the same mode, except that
2729 Y can be a constant with VOIDmode.
2730 This mode cannot be BLKmode; use emit_block_move for that.
2732 Return the last instruction emitted. */
2735 emit_move_insn (x, y)
2738 enum machine_mode mode = GET_MODE (x);
2739 rtx y_cst = NULL_RTX;
2742 x = protect_from_queue (x, 1);
2743 y = protect_from_queue (y, 0);
2745 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2748 /* Never force constant_p_rtx to memory. */
2749 if (GET_CODE (y) == CONSTANT_P_RTX)
2751 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2754 y = force_const_mem (mode, y);
2757 /* If X or Y are memory references, verify that their addresses are valid
2759 if (GET_CODE (x) == MEM
2760 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2761 && ! push_operand (x, GET_MODE (x)))
2763 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2764 x = validize_mem (x);
2766 if (GET_CODE (y) == MEM
2767 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2769 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2770 y = validize_mem (y);
2772 if (mode == BLKmode)
2775 last_insn = emit_move_insn_1 (x, y);
2777 if (y_cst && GET_CODE (x) == REG)
2778 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2783 /* Low level part of emit_move_insn.
2784 Called just like emit_move_insn, but assumes X and Y
2785 are basically valid. */
2788 emit_move_insn_1 (x, y)
2791 enum machine_mode mode = GET_MODE (x);
2792 enum machine_mode submode;
2793 enum mode_class class = GET_MODE_CLASS (mode);
2796 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2799 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2801 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2803 /* Expand complex moves by moving real part and imag part, if possible. */
2804 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2805 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2807 (class == MODE_COMPLEX_INT
2808 ? MODE_INT : MODE_FLOAT),
2810 && (mov_optab->handlers[(int) submode].insn_code
2811 != CODE_FOR_nothing))
2813 /* Don't split destination if it is a stack push. */
2814 int stack = push_operand (x, GET_MODE (x));
2816 #ifdef PUSH_ROUNDING
2817 /* In case we output to the stack, but the size is smaller machine can
2818 push exactly, we need to use move instructions. */
2820 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2823 int offset1, offset2;
2825 /* Do not use anti_adjust_stack, since we don't want to update
2826 stack_pointer_delta. */
2827 temp = expand_binop (Pmode,
2828 #ifdef STACK_GROWS_DOWNWARD
2835 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2839 if (temp != stack_pointer_rtx)
2840 emit_move_insn (stack_pointer_rtx, temp);
2841 #ifdef STACK_GROWS_DOWNWARD
2843 offset2 = GET_MODE_SIZE (submode);
2845 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2846 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2847 + GET_MODE_SIZE (submode));
2849 emit_move_insn (change_address (x, submode,
2850 gen_rtx_PLUS (Pmode,
2852 GEN_INT (offset1))),
2853 gen_realpart (submode, y));
2854 emit_move_insn (change_address (x, submode,
2855 gen_rtx_PLUS (Pmode,
2857 GEN_INT (offset2))),
2858 gen_imagpart (submode, y));
2862 /* If this is a stack, push the highpart first, so it
2863 will be in the argument order.
2865 In that case, change_address is used only to convert
2866 the mode, not to change the address. */
2869 /* Note that the real part always precedes the imag part in memory
2870 regardless of machine's endianness. */
2871 #ifdef STACK_GROWS_DOWNWARD
2872 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2873 (gen_rtx_MEM (submode, XEXP (x, 0)),
2874 gen_imagpart (submode, y)));
2875 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2876 (gen_rtx_MEM (submode, XEXP (x, 0)),
2877 gen_realpart (submode, y)));
2879 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2880 (gen_rtx_MEM (submode, XEXP (x, 0)),
2881 gen_realpart (submode, y)));
2882 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2883 (gen_rtx_MEM (submode, XEXP (x, 0)),
2884 gen_imagpart (submode, y)));
2889 rtx realpart_x, realpart_y;
2890 rtx imagpart_x, imagpart_y;
2892 /* If this is a complex value with each part being smaller than a
2893 word, the usual calling sequence will likely pack the pieces into
2894 a single register. Unfortunately, SUBREG of hard registers only
2895 deals in terms of words, so we have a problem converting input
2896 arguments to the CONCAT of two registers that is used elsewhere
2897 for complex values. If this is before reload, we can copy it into
2898 memory and reload. FIXME, we should see about using extract and
2899 insert on integer registers, but complex short and complex char
2900 variables should be rarely used. */
2901 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2902 && (reload_in_progress | reload_completed) == 0)
2904 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2905 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2907 if (packed_dest_p || packed_src_p)
2909 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2910 ? MODE_FLOAT : MODE_INT);
2912 enum machine_mode reg_mode
2913 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2915 if (reg_mode != BLKmode)
2917 rtx mem = assign_stack_temp (reg_mode,
2918 GET_MODE_SIZE (mode), 0);
2919 rtx cmem = adjust_address (mem, mode, 0);
2922 = N_("function using short complex types cannot be inline");
2926 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2927 emit_move_insn_1 (cmem, y);
2928 return emit_move_insn_1 (sreg, mem);
2932 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2933 emit_move_insn_1 (mem, sreg);
2934 return emit_move_insn_1 (x, cmem);
2940 realpart_x = gen_realpart (submode, x);
2941 realpart_y = gen_realpart (submode, y);
2942 imagpart_x = gen_imagpart (submode, x);
2943 imagpart_y = gen_imagpart (submode, y);
2945 /* Show the output dies here. This is necessary for SUBREGs
2946 of pseudos since we cannot track their lifetimes correctly;
2947 hard regs shouldn't appear here except as return values.
2948 We never want to emit such a clobber after reload. */
2950 && ! (reload_in_progress || reload_completed)
2951 && (GET_CODE (realpart_x) == SUBREG
2952 || GET_CODE (imagpart_x) == SUBREG))
2954 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2957 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2958 (realpart_x, realpart_y));
2959 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2960 (imagpart_x, imagpart_y));
2963 return get_last_insn ();
2966 /* This will handle any multi-word mode that lacks a move_insn pattern.
2967 However, you will get better code if you define such patterns,
2968 even if they must turn into multiple assembler instructions. */
2969 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2975 #ifdef PUSH_ROUNDING
2977 /* If X is a push on the stack, do the push now and replace
2978 X with a reference to the stack pointer. */
2979 if (push_operand (x, GET_MODE (x)))
2984 /* Do not use anti_adjust_stack, since we don't want to update
2985 stack_pointer_delta. */
2986 temp = expand_binop (Pmode,
2987 #ifdef STACK_GROWS_DOWNWARD
2994 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2998 if (temp != stack_pointer_rtx)
2999 emit_move_insn (stack_pointer_rtx, temp);
3001 code = GET_CODE (XEXP (x, 0));
3002 /* Just hope that small offsets off SP are OK. */
3003 if (code == POST_INC)
3004 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3005 GEN_INT (-(HOST_WIDE_INT)
3006 GET_MODE_SIZE (GET_MODE (x))));
3007 else if (code == POST_DEC)
3008 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3009 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3011 temp = stack_pointer_rtx;
3013 x = change_address (x, VOIDmode, temp);
3017 /* If we are in reload, see if either operand is a MEM whose address
3018 is scheduled for replacement. */
3019 if (reload_in_progress && GET_CODE (x) == MEM
3020 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3021 x = replace_equiv_address_nv (x, inner);
3022 if (reload_in_progress && GET_CODE (y) == MEM
3023 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3024 y = replace_equiv_address_nv (y, inner);
3030 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3033 rtx xpart = operand_subword (x, i, 1, mode);
3034 rtx ypart = operand_subword (y, i, 1, mode);
3036 /* If we can't get a part of Y, put Y into memory if it is a
3037 constant. Otherwise, force it into a register. If we still
3038 can't get a part of Y, abort. */
3039 if (ypart == 0 && CONSTANT_P (y))
3041 y = force_const_mem (mode, y);
3042 ypart = operand_subword (y, i, 1, mode);
3044 else if (ypart == 0)
3045 ypart = operand_subword_force (y, i, mode);
3047 if (xpart == 0 || ypart == 0)
3050 need_clobber |= (GET_CODE (xpart) == SUBREG);
3052 last_insn = emit_move_insn (xpart, ypart);
3055 seq = gen_sequence ();
3058 /* Show the output dies here. This is necessary for SUBREGs
3059 of pseudos since we cannot track their lifetimes correctly;
3060 hard regs shouldn't appear here except as return values.
3061 We never want to emit such a clobber after reload. */
3063 && ! (reload_in_progress || reload_completed)
3064 && need_clobber != 0)
3066 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3077 /* Pushing data onto the stack. */
3079 /* Push a block of length SIZE (perhaps variable)
3080 and return an rtx to address the beginning of the block.
3081 Note that it is not possible for the value returned to be a QUEUED.
3082 The value may be virtual_outgoing_args_rtx.
3084 EXTRA is the number of bytes of padding to push in addition to SIZE.
3085 BELOW nonzero means this padding comes at low addresses;
3086 otherwise, the padding comes at high addresses. */
3089 push_block (size, extra, below)
3095 size = convert_modes (Pmode, ptr_mode, size, 1);
3096 if (CONSTANT_P (size))
3097 anti_adjust_stack (plus_constant (size, extra));
3098 else if (GET_CODE (size) == REG && extra == 0)
3099 anti_adjust_stack (size);
3102 temp = copy_to_mode_reg (Pmode, size);
3104 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3105 temp, 0, OPTAB_LIB_WIDEN);
3106 anti_adjust_stack (temp);
3109 #ifndef STACK_GROWS_DOWNWARD
3115 temp = virtual_outgoing_args_rtx;
3116 if (extra != 0 && below)
3117 temp = plus_constant (temp, extra);
3121 if (GET_CODE (size) == CONST_INT)
3122 temp = plus_constant (virtual_outgoing_args_rtx,
3123 -INTVAL (size) - (below ? 0 : extra));
3124 else if (extra != 0 && !below)
3125 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3126 negate_rtx (Pmode, plus_constant (size, extra)));
3128 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3129 negate_rtx (Pmode, size));
3132 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3136 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3137 block of SIZE bytes. */
3140 get_push_address (size)
3145 if (STACK_PUSH_CODE == POST_DEC)
3146 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3147 else if (STACK_PUSH_CODE == POST_INC)
3148 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3150 temp = stack_pointer_rtx;
3152 return copy_to_reg (temp);
3155 #ifdef PUSH_ROUNDING
3157 /* Emit single push insn. */
3160 emit_single_push_insn (mode, x, type)
3162 enum machine_mode mode;
3166 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3168 enum insn_code icode;
3169 insn_operand_predicate_fn pred;
3171 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3172 /* If there is push pattern, use it. Otherwise try old way of throwing
3173 MEM representing push operation to move expander. */
3174 icode = push_optab->handlers[(int) mode].insn_code;
3175 if (icode != CODE_FOR_nothing)
3177 if (((pred = insn_data[(int) icode].operand[0].predicate)
3178 && !((*pred) (x, mode))))
3179 x = force_reg (mode, x);
3180 emit_insn (GEN_FCN (icode) (x));
3183 if (GET_MODE_SIZE (mode) == rounded_size)
3184 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3187 #ifdef STACK_GROWS_DOWNWARD
3188 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3189 GEN_INT (-(HOST_WIDE_INT)rounded_size));
3191 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3192 GEN_INT (rounded_size));
3194 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3197 dest = gen_rtx_MEM (mode, dest_addr);
3201 set_mem_attributes (dest, type, 1);
3203 if (flag_optimize_sibling_calls)
3204 /* Function incoming arguments may overlap with sibling call
3205 outgoing arguments and we cannot allow reordering of reads
3206 from function arguments with stores to outgoing arguments
3207 of sibling calls. */
3208 set_mem_alias_set (dest, 0);
3210 emit_move_insn (dest, x);
3214 /* Generate code to push X onto the stack, assuming it has mode MODE and
3216 MODE is redundant except when X is a CONST_INT (since they don't
3218 SIZE is an rtx for the size of data to be copied (in bytes),
3219 needed only if X is BLKmode.
3221 ALIGN (in bits) is maximum alignment we can assume.
3223 If PARTIAL and REG are both nonzero, then copy that many of the first
3224 words of X into registers starting with REG, and push the rest of X.
3225 The amount of space pushed is decreased by PARTIAL words,
3226 rounded *down* to a multiple of PARM_BOUNDARY.
3227 REG must be a hard register in this case.
3228 If REG is zero but PARTIAL is not, take any all others actions for an
3229 argument partially in registers, but do not actually load any
3232 EXTRA is the amount in bytes of extra space to leave next to this arg.
3233 This is ignored if an argument block has already been allocated.
3235 On a machine that lacks real push insns, ARGS_ADDR is the address of
3236 the bottom of the argument block for this call. We use indexing off there
3237 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3238 argument block has not been preallocated.
3240 ARGS_SO_FAR is the size of args previously pushed for this call.
3242 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3243 for arguments passed in registers. If nonzero, it will be the number
3244 of bytes required. */
3247 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3248 args_addr, args_so_far, reg_parm_stack_space,
3251 enum machine_mode mode;
3260 int reg_parm_stack_space;
3264 enum direction stack_direction
3265 #ifdef STACK_GROWS_DOWNWARD
3271 /* Decide where to pad the argument: `downward' for below,
3272 `upward' for above, or `none' for don't pad it.
3273 Default is below for small data on big-endian machines; else above. */
3274 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3276 /* Invert direction if stack is post-decrement.
3278 if (STACK_PUSH_CODE == POST_DEC)
3279 if (where_pad != none)
3280 where_pad = (where_pad == downward ? upward : downward);
3282 xinner = x = protect_from_queue (x, 0);
3284 if (mode == BLKmode)
3286 /* Copy a block into the stack, entirely or partially. */
3289 int used = partial * UNITS_PER_WORD;
3290 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3298 /* USED is now the # of bytes we need not copy to the stack
3299 because registers will take care of them. */
3302 xinner = adjust_address (xinner, BLKmode, used);
3304 /* If the partial register-part of the arg counts in its stack size,
3305 skip the part of stack space corresponding to the registers.
3306 Otherwise, start copying to the beginning of the stack space,
3307 by setting SKIP to 0. */
3308 skip = (reg_parm_stack_space == 0) ? 0 : used;
3310 #ifdef PUSH_ROUNDING
3311 /* Do it with several push insns if that doesn't take lots of insns
3312 and if there is no difficulty with push insns that skip bytes
3313 on the stack for alignment purposes. */
3316 && GET_CODE (size) == CONST_INT
3318 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3319 /* Here we avoid the case of a structure whose weak alignment
3320 forces many pushes of a small amount of data,
3321 and such small pushes do rounding that causes trouble. */
3322 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3323 || align >= BIGGEST_ALIGNMENT
3324 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3325 == (align / BITS_PER_UNIT)))
3326 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3328 /* Push padding now if padding above and stack grows down,
3329 or if padding below and stack grows up.
3330 But if space already allocated, this has already been done. */
3331 if (extra && args_addr == 0
3332 && where_pad != none && where_pad != stack_direction)
3333 anti_adjust_stack (GEN_INT (extra));
3335 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3337 if (current_function_check_memory_usage && ! in_check_memory_usage)
3341 in_check_memory_usage = 1;
3342 temp = get_push_address (INTVAL (size) - used);
3343 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3344 emit_library_call (chkr_copy_bitmap_libfunc,
3345 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3346 Pmode, XEXP (xinner, 0), Pmode,
3347 GEN_INT (INTVAL (size) - used),
3348 TYPE_MODE (sizetype));
3350 emit_library_call (chkr_set_right_libfunc,
3351 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3352 Pmode, GEN_INT (INTVAL (size) - used),
3353 TYPE_MODE (sizetype),
3354 GEN_INT (MEMORY_USE_RW),
3355 TYPE_MODE (integer_type_node));
3356 in_check_memory_usage = 0;
3360 #endif /* PUSH_ROUNDING */
3364 /* Otherwise make space on the stack and copy the data
3365 to the address of that space. */
3367 /* Deduct words put into registers from the size we must copy. */
3370 if (GET_CODE (size) == CONST_INT)
3371 size = GEN_INT (INTVAL (size) - used);
3373 size = expand_binop (GET_MODE (size), sub_optab, size,
3374 GEN_INT (used), NULL_RTX, 0,
3378 /* Get the address of the stack space.
3379 In this case, we do not deal with EXTRA separately.
3380 A single stack adjust will do. */
3383 temp = push_block (size, extra, where_pad == downward);
3386 else if (GET_CODE (args_so_far) == CONST_INT)
3387 temp = memory_address (BLKmode,
3388 plus_constant (args_addr,
3389 skip + INTVAL (args_so_far)));
3391 temp = memory_address (BLKmode,
3392 plus_constant (gen_rtx_PLUS (Pmode,
3396 if (current_function_check_memory_usage && ! in_check_memory_usage)
3398 in_check_memory_usage = 1;
3399 target = copy_to_reg (temp);
3400 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3401 emit_library_call (chkr_copy_bitmap_libfunc,
3402 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3404 XEXP (xinner, 0), Pmode,
3405 size, TYPE_MODE (sizetype));
3407 emit_library_call (chkr_set_right_libfunc,
3408 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3410 size, TYPE_MODE (sizetype),
3411 GEN_INT (MEMORY_USE_RW),
3412 TYPE_MODE (integer_type_node));
3413 in_check_memory_usage = 0;
3416 target = gen_rtx_MEM (BLKmode, temp);
3420 set_mem_attributes (target, type, 1);
3421 /* Function incoming arguments may overlap with sibling call
3422 outgoing arguments and we cannot allow reordering of reads
3423 from function arguments with stores to outgoing arguments
3424 of sibling calls. */
3425 set_mem_alias_set (target, 0);
3428 set_mem_align (target, align);
3430 /* TEMP is the address of the block. Copy the data there. */
3431 if (GET_CODE (size) == CONST_INT
3432 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3434 move_by_pieces (target, xinner, INTVAL (size), align);
3439 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3440 enum machine_mode mode;
3442 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3444 mode = GET_MODE_WIDER_MODE (mode))
3446 enum insn_code code = movstr_optab[(int) mode];
3447 insn_operand_predicate_fn pred;
3449 if (code != CODE_FOR_nothing
3450 && ((GET_CODE (size) == CONST_INT
3451 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3452 <= (GET_MODE_MASK (mode) >> 1)))
3453 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3454 && (!(pred = insn_data[(int) code].operand[0].predicate)
3455 || ((*pred) (target, BLKmode)))
3456 && (!(pred = insn_data[(int) code].operand[1].predicate)
3457 || ((*pred) (xinner, BLKmode)))
3458 && (!(pred = insn_data[(int) code].operand[3].predicate)
3459 || ((*pred) (opalign, VOIDmode))))
3461 rtx op2 = convert_to_mode (mode, size, 1);
3462 rtx last = get_last_insn ();
3465 pred = insn_data[(int) code].operand[2].predicate;
3466 if (pred != 0 && ! (*pred) (op2, mode))
3467 op2 = copy_to_mode_reg (mode, op2);
3469 pat = GEN_FCN ((int) code) (target, xinner,
3477 delete_insns_since (last);
3482 if (!ACCUMULATE_OUTGOING_ARGS)
3484 /* If the source is referenced relative to the stack pointer,
3485 copy it to another register to stabilize it. We do not need
3486 to do this if we know that we won't be changing sp. */
3488 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3489 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3490 temp = copy_to_reg (temp);
3493 /* Make inhibit_defer_pop nonzero around the library call
3494 to force it to pop the bcopy-arguments right away. */
3496 #ifdef TARGET_MEM_FUNCTIONS
3497 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3498 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3499 convert_to_mode (TYPE_MODE (sizetype),
3500 size, TREE_UNSIGNED (sizetype)),
3501 TYPE_MODE (sizetype));
3503 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3504 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3505 convert_to_mode (TYPE_MODE (integer_type_node),
3507 TREE_UNSIGNED (integer_type_node)),
3508 TYPE_MODE (integer_type_node));
3513 else if (partial > 0)
3515 /* Scalar partly in registers. */
3517 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3520 /* # words of start of argument
3521 that we must make space for but need not store. */
3522 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3523 int args_offset = INTVAL (args_so_far);
3526 /* Push padding now if padding above and stack grows down,
3527 or if padding below and stack grows up.
3528 But if space already allocated, this has already been done. */
3529 if (extra && args_addr == 0
3530 && where_pad != none && where_pad != stack_direction)
3531 anti_adjust_stack (GEN_INT (extra));
3533 /* If we make space by pushing it, we might as well push
3534 the real data. Otherwise, we can leave OFFSET nonzero
3535 and leave the space uninitialized. */
3539 /* Now NOT_STACK gets the number of words that we don't need to
3540 allocate on the stack. */
3541 not_stack = partial - offset;
3543 /* If the partial register-part of the arg counts in its stack size,
3544 skip the part of stack space corresponding to the registers.
3545 Otherwise, start copying to the beginning of the stack space,
3546 by setting SKIP to 0. */
3547 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3549 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3550 x = validize_mem (force_const_mem (mode, x));
3552 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3553 SUBREGs of such registers are not allowed. */
3554 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3555 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3556 x = copy_to_reg (x);
3558 /* Loop over all the words allocated on the stack for this arg. */
3559 /* We can do it by words, because any scalar bigger than a word
3560 has a size a multiple of a word. */
3561 #ifndef PUSH_ARGS_REVERSED
3562 for (i = not_stack; i < size; i++)
3564 for (i = size - 1; i >= not_stack; i--)
3566 if (i >= not_stack + offset)
3567 emit_push_insn (operand_subword_force (x, i, mode),
3568 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3570 GEN_INT (args_offset + ((i - not_stack + skip)
3572 reg_parm_stack_space, alignment_pad);
3577 rtx target = NULL_RTX;
3580 /* Push padding now if padding above and stack grows down,
3581 or if padding below and stack grows up.
3582 But if space already allocated, this has already been done. */
3583 if (extra && args_addr == 0
3584 && where_pad != none && where_pad != stack_direction)
3585 anti_adjust_stack (GEN_INT (extra));
3587 #ifdef PUSH_ROUNDING
3588 if (args_addr == 0 && PUSH_ARGS)
3589 emit_single_push_insn (mode, x, type);
3593 if (GET_CODE (args_so_far) == CONST_INT)
3595 = memory_address (mode,
3596 plus_constant (args_addr,
3597 INTVAL (args_so_far)));
3599 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3602 dest = gen_rtx_MEM (mode, addr);
3605 set_mem_attributes (dest, type, 1);
3606 /* Function incoming arguments may overlap with sibling call
3607 outgoing arguments and we cannot allow reordering of reads
3608 from function arguments with stores to outgoing arguments
3609 of sibling calls. */
3610 set_mem_alias_set (dest, 0);
3613 emit_move_insn (dest, x);
3617 if (current_function_check_memory_usage && ! in_check_memory_usage)
3619 in_check_memory_usage = 1;
3621 target = get_push_address (GET_MODE_SIZE (mode));
3623 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3624 emit_library_call (chkr_copy_bitmap_libfunc,
3625 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3626 Pmode, XEXP (x, 0), Pmode,
3627 GEN_INT (GET_MODE_SIZE (mode)),
3628 TYPE_MODE (sizetype));
3630 emit_library_call (chkr_set_right_libfunc,
3631 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3632 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3633 TYPE_MODE (sizetype),
3634 GEN_INT (MEMORY_USE_RW),
3635 TYPE_MODE (integer_type_node));
3636 in_check_memory_usage = 0;
3641 /* If part should go in registers, copy that part
3642 into the appropriate registers. Do this now, at the end,
3643 since mem-to-mem copies above may do function calls. */
3644 if (partial > 0 && reg != 0)
3646 /* Handle calls that pass values in multiple non-contiguous locations.
3647 The Irix 6 ABI has examples of this. */
3648 if (GET_CODE (reg) == PARALLEL)
3649 emit_group_load (reg, x, -1); /* ??? size? */
3651 move_block_to_reg (REGNO (reg), x, partial, mode);
3654 if (extra && args_addr == 0 && where_pad == stack_direction)
3655 anti_adjust_stack (GEN_INT (extra));
3657 if (alignment_pad && args_addr == 0)
3658 anti_adjust_stack (alignment_pad);
3661 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3669 /* Only registers can be subtargets. */
3670 || GET_CODE (x) != REG
3671 /* If the register is readonly, it can't be set more than once. */
3672 || RTX_UNCHANGING_P (x)
3673 /* Don't use hard regs to avoid extending their life. */
3674 || REGNO (x) < FIRST_PSEUDO_REGISTER
3675 /* Avoid subtargets inside loops,
3676 since they hide some invariant expressions. */
3677 || preserve_subexpressions_p ())
3681 /* Expand an assignment that stores the value of FROM into TO.
3682 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3683 (This may contain a QUEUED rtx;
3684 if the value is constant, this rtx is a constant.)
3685 Otherwise, the returned value is NULL_RTX.
3687 SUGGEST_REG is no longer actually used.
3688 It used to mean, copy the value through a register
3689 and return that register, if that is possible.
3690 We now use WANT_VALUE to decide whether to do this. */
3693 expand_assignment (to, from, want_value, suggest_reg)
3696 int suggest_reg ATTRIBUTE_UNUSED;
3701 /* Don't crash if the lhs of the assignment was erroneous. */
3703 if (TREE_CODE (to) == ERROR_MARK)
3705 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3706 return want_value ? result : NULL_RTX;
3709 /* Assignment of a structure component needs special treatment
3710 if the structure component's rtx is not simply a MEM.
3711 Assignment of an array element at a constant index, and assignment of
3712 an array element in an unaligned packed structure field, has the same
3715 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3716 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3718 enum machine_mode mode1;
3719 HOST_WIDE_INT bitsize, bitpos;
3727 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3728 &unsignedp, &volatilep);
3730 /* If we are going to use store_bit_field and extract_bit_field,
3731 make sure to_rtx will be safe for multiple use. */
3733 if (mode1 == VOIDmode && want_value)
3734 tem = stabilize_reference (tem);
3736 orig_to_rtx = to_rtx
3737 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3740 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3742 if (GET_CODE (to_rtx) != MEM)
3745 if (GET_MODE (offset_rtx) != ptr_mode)
3746 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3748 #ifdef POINTERS_EXTEND_UNSIGNED
3749 if (GET_MODE (offset_rtx) != Pmode)
3750 offset_rtx = convert_memory_address (Pmode, offset_rtx);
3753 /* A constant address in TO_RTX can have VOIDmode, we must not try
3754 to call force_reg for that case. Avoid that case. */
3755 if (GET_CODE (to_rtx) == MEM
3756 && GET_MODE (to_rtx) == BLKmode
3757 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3759 && (bitpos % bitsize) == 0
3760 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3761 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3764 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3766 if (GET_CODE (XEXP (temp, 0)) == REG)
3769 to_rtx = (replace_equiv_address
3770 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3775 to_rtx = offset_address (to_rtx, offset_rtx,
3776 highest_pow2_factor (offset));
3780 /* Deal with volatile and readonly fields. The former is only done
3781 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3782 if (volatilep && GET_CODE (to_rtx) == MEM)
3784 if (to_rtx == orig_to_rtx)
3785 to_rtx = copy_rtx (to_rtx);
3786 MEM_VOLATILE_P (to_rtx) = 1;
3789 if (TREE_CODE (to) == COMPONENT_REF
3790 && TREE_READONLY (TREE_OPERAND (to, 1)))
3792 if (to_rtx == orig_to_rtx)
3793 to_rtx = copy_rtx (to_rtx);
3794 RTX_UNCHANGING_P (to_rtx) = 1;
3797 if (! can_address_p (to))
3799 if (to_rtx == orig_to_rtx)
3800 to_rtx = copy_rtx (to_rtx);
3801 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3804 /* Check the access. */
3805 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3810 enum machine_mode best_mode;
3812 best_mode = get_best_mode (bitsize, bitpos,
3813 TYPE_ALIGN (TREE_TYPE (tem)),
3815 if (best_mode == VOIDmode)
3818 best_mode_size = GET_MODE_BITSIZE (best_mode);
3819 to_addr = plus_constant (XEXP (to_rtx, 0), bitpos / BITS_PER_UNIT);
3820 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3821 size *= GET_MODE_SIZE (best_mode);
3823 /* Check the access right of the pointer. */
3824 in_check_memory_usage = 1;
3826 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3827 VOIDmode, 3, to_addr, Pmode,
3828 GEN_INT (size), TYPE_MODE (sizetype),
3829 GEN_INT (MEMORY_USE_WO),
3830 TYPE_MODE (integer_type_node));
3831 in_check_memory_usage = 0;
3834 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3836 /* Spurious cast for HPUX compiler. */
3837 ? ((enum machine_mode)
3838 TYPE_MODE (TREE_TYPE (to)))
3840 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3842 preserve_temp_slots (result);
3846 /* If the value is meaningful, convert RESULT to the proper mode.
3847 Otherwise, return nothing. */
3848 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3849 TYPE_MODE (TREE_TYPE (from)),
3851 TREE_UNSIGNED (TREE_TYPE (to)))
3855 /* If the rhs is a function call and its value is not an aggregate,
3856 call the function before we start to compute the lhs.
3857 This is needed for correct code for cases such as
3858 val = setjmp (buf) on machines where reference to val
3859 requires loading up part of an address in a separate insn.
3861 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3862 since it might be a promoted variable where the zero- or sign- extension
3863 needs to be done. Handling this in the normal way is safe because no
3864 computation is done before the call. */
3865 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3866 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3867 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3868 && GET_CODE (DECL_RTL (to)) == REG))
3873 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3875 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3877 /* Handle calls that return values in multiple non-contiguous locations.
3878 The Irix 6 ABI has examples of this. */
3879 if (GET_CODE (to_rtx) == PARALLEL)
3880 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
3881 else if (GET_MODE (to_rtx) == BLKmode)
3882 emit_block_move (to_rtx, value, expr_size (from));
3885 #ifdef POINTERS_EXTEND_UNSIGNED
3886 if (POINTER_TYPE_P (TREE_TYPE (to))
3887 && GET_MODE (to_rtx) != GET_MODE (value))
3888 value = convert_memory_address (GET_MODE (to_rtx), value);
3890 emit_move_insn (to_rtx, value);
3892 preserve_temp_slots (to_rtx);
3895 return want_value ? to_rtx : NULL_RTX;
3898 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3899 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3902 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3904 /* Don't move directly into a return register. */
3905 if (TREE_CODE (to) == RESULT_DECL
3906 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3911 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3913 if (GET_CODE (to_rtx) == PARALLEL)
3914 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
3916 emit_move_insn (to_rtx, temp);
3918 preserve_temp_slots (to_rtx);
3921 return want_value ? to_rtx : NULL_RTX;
3924 /* In case we are returning the contents of an object which overlaps
3925 the place the value is being stored, use a safe function when copying
3926 a value through a pointer into a structure value return block. */
3927 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3928 && current_function_returns_struct
3929 && !current_function_returns_pcc_struct)
3934 size = expr_size (from);
3935 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3936 EXPAND_MEMORY_USE_DONT);
3938 /* Copy the rights of the bitmap. */
3939 if (current_function_check_memory_usage)
3940 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3941 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3942 XEXP (from_rtx, 0), Pmode,
3943 convert_to_mode (TYPE_MODE (sizetype),
3944 size, TREE_UNSIGNED (sizetype)),
3945 TYPE_MODE (sizetype));
3947 #ifdef TARGET_MEM_FUNCTIONS
3948 emit_library_call (memmove_libfunc, LCT_NORMAL,
3949 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3950 XEXP (from_rtx, 0), Pmode,
3951 convert_to_mode (TYPE_MODE (sizetype),
3952 size, TREE_UNSIGNED (sizetype)),
3953 TYPE_MODE (sizetype));
3955 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3956 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3957 XEXP (to_rtx, 0), Pmode,
3958 convert_to_mode (TYPE_MODE (integer_type_node),
3959 size, TREE_UNSIGNED (integer_type_node)),
3960 TYPE_MODE (integer_type_node));
3963 preserve_temp_slots (to_rtx);
3966 return want_value ? to_rtx : NULL_RTX;
3969 /* Compute FROM and store the value in the rtx we got. */
3972 result = store_expr (from, to_rtx, want_value);
3973 preserve_temp_slots (result);
3976 return want_value ? result : NULL_RTX;
3979 /* Generate code for computing expression EXP,
3980 and storing the value into TARGET.
3981 TARGET may contain a QUEUED rtx.
3983 If WANT_VALUE is nonzero, return a copy of the value
3984 not in TARGET, so that we can be sure to use the proper
3985 value in a containing expression even if TARGET has something
3986 else stored in it. If possible, we copy the value through a pseudo
3987 and return that pseudo. Or, if the value is constant, we try to
3988 return the constant. In some cases, we return a pseudo
3989 copied *from* TARGET.
3991 If the mode is BLKmode then we may return TARGET itself.
3992 It turns out that in BLKmode it doesn't cause a problem.
3993 because C has no operators that could combine two different
3994 assignments into the same BLKmode object with different values
3995 with no sequence point. Will other languages need this to
3998 If WANT_VALUE is 0, we return NULL, to make sure
3999 to catch quickly any cases where the caller uses the value
4000 and fails to set WANT_VALUE. */
4003 store_expr (exp, target, want_value)
4009 int dont_return_target = 0;
4010 int dont_store_target = 0;
4012 if (TREE_CODE (exp) == COMPOUND_EXPR)
4014 /* Perform first part of compound expression, then assign from second
4016 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4018 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4020 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4022 /* For conditional expression, get safe form of the target. Then
4023 test the condition, doing the appropriate assignment on either
4024 side. This avoids the creation of unnecessary temporaries.
4025 For non-BLKmode, it is more efficient not to do this. */
4027 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4030 target = protect_from_queue (target, 1);
4032 do_pending_stack_adjust ();
4034 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4035 start_cleanup_deferral ();
4036 store_expr (TREE_OPERAND (exp, 1), target, 0);
4037 end_cleanup_deferral ();
4039 emit_jump_insn (gen_jump (lab2));
4042 start_cleanup_deferral ();
4043 store_expr (TREE_OPERAND (exp, 2), target, 0);
4044 end_cleanup_deferral ();
4049 return want_value ? target : NULL_RTX;
4051 else if (queued_subexp_p (target))
4052 /* If target contains a postincrement, let's not risk
4053 using it as the place to generate the rhs. */
4055 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4057 /* Expand EXP into a new pseudo. */
4058 temp = gen_reg_rtx (GET_MODE (target));
4059 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4062 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4064 /* If target is volatile, ANSI requires accessing the value
4065 *from* the target, if it is accessed. So make that happen.
4066 In no case return the target itself. */
4067 if (! MEM_VOLATILE_P (target) && want_value)
4068 dont_return_target = 1;
4070 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4071 && GET_MODE (target) != BLKmode)
4072 /* If target is in memory and caller wants value in a register instead,
4073 arrange that. Pass TARGET as target for expand_expr so that,
4074 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4075 We know expand_expr will not use the target in that case.
4076 Don't do this if TARGET is volatile because we are supposed
4077 to write it and then read it. */
4079 temp = expand_expr (exp, target, GET_MODE (target), 0);
4080 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4082 /* If TEMP is already in the desired TARGET, only copy it from
4083 memory and don't store it there again. */
4085 || (rtx_equal_p (temp, target)
4086 && ! side_effects_p (temp) && ! side_effects_p (target)))
4087 dont_store_target = 1;
4088 temp = copy_to_reg (temp);
4090 dont_return_target = 1;
4092 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4093 /* If this is an scalar in a register that is stored in a wider mode
4094 than the declared mode, compute the result into its declared mode
4095 and then convert to the wider mode. Our value is the computed
4098 /* If we don't want a value, we can do the conversion inside EXP,
4099 which will often result in some optimizations. Do the conversion
4100 in two steps: first change the signedness, if needed, then
4101 the extend. But don't do this if the type of EXP is a subtype
4102 of something else since then the conversion might involve
4103 more than just converting modes. */
4104 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4105 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4107 if (TREE_UNSIGNED (TREE_TYPE (exp))
4108 != SUBREG_PROMOTED_UNSIGNED_P (target))
4111 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4115 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4116 SUBREG_PROMOTED_UNSIGNED_P (target)),
4120 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4122 /* If TEMP is a volatile MEM and we want a result value, make
4123 the access now so it gets done only once. Likewise if
4124 it contains TARGET. */
4125 if (GET_CODE (temp) == MEM && want_value
4126 && (MEM_VOLATILE_P (temp)
4127 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4128 temp = copy_to_reg (temp);
4130 /* If TEMP is a VOIDmode constant, use convert_modes to make
4131 sure that we properly convert it. */
4132 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4134 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4135 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4136 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4137 GET_MODE (target), temp,
4138 SUBREG_PROMOTED_UNSIGNED_P (target));
4141 convert_move (SUBREG_REG (target), temp,
4142 SUBREG_PROMOTED_UNSIGNED_P (target));
4144 /* If we promoted a constant, change the mode back down to match
4145 target. Otherwise, the caller might get confused by a result whose
4146 mode is larger than expected. */
4148 if (want_value && GET_MODE (temp) != GET_MODE (target)
4149 && GET_MODE (temp) != VOIDmode)
4151 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4152 SUBREG_PROMOTED_VAR_P (temp) = 1;
4153 SUBREG_PROMOTED_UNSIGNED_P (temp)
4154 = SUBREG_PROMOTED_UNSIGNED_P (target);
4157 return want_value ? temp : NULL_RTX;
4161 temp = expand_expr (exp, target, GET_MODE (target), 0);
4162 /* Return TARGET if it's a specified hardware register.
4163 If TARGET is a volatile mem ref, either return TARGET
4164 or return a reg copied *from* TARGET; ANSI requires this.
4166 Otherwise, if TEMP is not TARGET, return TEMP
4167 if it is constant (for efficiency),
4168 or if we really want the correct value. */
4169 if (!(target && GET_CODE (target) == REG
4170 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4171 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4172 && ! rtx_equal_p (temp, target)
4173 && (CONSTANT_P (temp) || want_value))
4174 dont_return_target = 1;
4177 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4178 the same as that of TARGET, adjust the constant. This is needed, for
4179 example, in case it is a CONST_DOUBLE and we want only a word-sized
4181 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4182 && TREE_CODE (exp) != ERROR_MARK
4183 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4184 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4185 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4187 if (current_function_check_memory_usage
4188 && GET_CODE (target) == MEM
4189 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4191 in_check_memory_usage = 1;
4192 if (GET_CODE (temp) == MEM)
4193 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4194 VOIDmode, 3, XEXP (target, 0), Pmode,
4195 XEXP (temp, 0), Pmode,
4196 expr_size (exp), TYPE_MODE (sizetype));
4198 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4199 VOIDmode, 3, XEXP (target, 0), Pmode,
4200 expr_size (exp), TYPE_MODE (sizetype),
4201 GEN_INT (MEMORY_USE_WO),
4202 TYPE_MODE (integer_type_node));
4203 in_check_memory_usage = 0;
4206 /* If value was not generated in the target, store it there.
4207 Convert the value to TARGET's type first if nec. */
4208 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4209 one or both of them are volatile memory refs, we have to distinguish
4211 - expand_expr has used TARGET. In this case, we must not generate
4212 another copy. This can be detected by TARGET being equal according
4214 - expand_expr has not used TARGET - that means that the source just
4215 happens to have the same RTX form. Since temp will have been created
4216 by expand_expr, it will compare unequal according to == .
4217 We must generate a copy in this case, to reach the correct number
4218 of volatile memory references. */
4220 if ((! rtx_equal_p (temp, target)
4221 || (temp != target && (side_effects_p (temp)
4222 || side_effects_p (target))))
4223 && TREE_CODE (exp) != ERROR_MARK
4224 && ! dont_store_target)
4226 target = protect_from_queue (target, 1);
4227 if (GET_MODE (temp) != GET_MODE (target)
4228 && GET_MODE (temp) != VOIDmode)
4230 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4231 if (dont_return_target)
4233 /* In this case, we will return TEMP,
4234 so make sure it has the proper mode.
4235 But don't forget to store the value into TARGET. */
4236 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4237 emit_move_insn (target, temp);
4240 convert_move (target, temp, unsignedp);
4243 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4245 /* Handle copying a string constant into an array.
4246 The string constant may be shorter than the array.
4247 So copy just the string's actual length, and clear the rest. */
4251 /* Get the size of the data type of the string,
4252 which is actually the size of the target. */
4253 size = expr_size (exp);
4254 if (GET_CODE (size) == CONST_INT
4255 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4256 emit_block_move (target, temp, size);
4259 /* Compute the size of the data to copy from the string. */
4261 = size_binop (MIN_EXPR,
4262 make_tree (sizetype, size),
4263 size_int (TREE_STRING_LENGTH (exp)));
4264 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4268 /* Copy that much. */
4269 emit_block_move (target, temp, copy_size_rtx);
4271 /* Figure out how much is left in TARGET that we have to clear.
4272 Do all calculations in ptr_mode. */
4274 addr = XEXP (target, 0);
4275 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4277 if (GET_CODE (copy_size_rtx) == CONST_INT)
4279 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4280 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4284 addr = force_reg (ptr_mode, addr);
4285 addr = expand_binop (ptr_mode, add_optab, addr,
4286 copy_size_rtx, NULL_RTX, 0,
4289 size = expand_binop (ptr_mode, sub_optab, size,
4290 copy_size_rtx, NULL_RTX, 0,
4293 label = gen_label_rtx ();
4294 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4295 GET_MODE (size), 0, label);
4298 if (size != const0_rtx)
4300 rtx dest = gen_rtx_MEM (BLKmode, addr);
4302 MEM_COPY_ATTRIBUTES (dest, target);
4304 /* Be sure we can write on ADDR. */
4305 in_check_memory_usage = 1;
4306 if (current_function_check_memory_usage)
4307 emit_library_call (chkr_check_addr_libfunc,
4308 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4310 size, TYPE_MODE (sizetype),
4311 GEN_INT (MEMORY_USE_WO),
4312 TYPE_MODE (integer_type_node));
4313 in_check_memory_usage = 0;
4314 clear_storage (dest, size);
4321 /* Handle calls that return values in multiple non-contiguous locations.
4322 The Irix 6 ABI has examples of this. */
4323 else if (GET_CODE (target) == PARALLEL)
4324 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4325 else if (GET_MODE (temp) == BLKmode)
4326 emit_block_move (target, temp, expr_size (exp));
4328 emit_move_insn (target, temp);
4331 /* If we don't want a value, return NULL_RTX. */
4335 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4336 ??? The latter test doesn't seem to make sense. */
4337 else if (dont_return_target && GET_CODE (temp) != MEM)
4340 /* Return TARGET itself if it is a hard register. */
4341 else if (want_value && GET_MODE (target) != BLKmode
4342 && ! (GET_CODE (target) == REG
4343 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4344 return copy_to_reg (target);
4350 /* Return 1 if EXP just contains zeros. */
4358 switch (TREE_CODE (exp))
4362 case NON_LVALUE_EXPR:
4363 case VIEW_CONVERT_EXPR:
4364 return is_zeros_p (TREE_OPERAND (exp, 0));
4367 return integer_zerop (exp);
4371 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4374 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4377 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4378 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4379 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4380 if (! is_zeros_p (TREE_VALUE (elt)))
4390 /* Return 1 if EXP contains mostly (3/4) zeros. */
4393 mostly_zeros_p (exp)
4396 if (TREE_CODE (exp) == CONSTRUCTOR)
4398 int elts = 0, zeros = 0;
4399 tree elt = CONSTRUCTOR_ELTS (exp);
4400 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4402 /* If there are no ranges of true bits, it is all zero. */
4403 return elt == NULL_TREE;
4405 for (; elt; elt = TREE_CHAIN (elt))
4407 /* We do not handle the case where the index is a RANGE_EXPR,
4408 so the statistic will be somewhat inaccurate.
4409 We do make a more accurate count in store_constructor itself,
4410 so since this function is only used for nested array elements,
4411 this should be close enough. */
4412 if (mostly_zeros_p (TREE_VALUE (elt)))
4417 return 4 * zeros >= 3 * elts;
4420 return is_zeros_p (exp);
4423 /* Helper function for store_constructor.
4424 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4425 TYPE is the type of the CONSTRUCTOR, not the element type.
4426 CLEARED is as for store_constructor.
4427 ALIAS_SET is the alias set to use for any stores.
4429 This provides a recursive shortcut back to store_constructor when it isn't
4430 necessary to go through store_field. This is so that we can pass through
4431 the cleared field to let store_constructor know that we may not have to
4432 clear a substructure if the outer structure has already been cleared. */
4435 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4438 unsigned HOST_WIDE_INT bitsize;
4439 HOST_WIDE_INT bitpos;
4440 enum machine_mode mode;
4445 if (TREE_CODE (exp) == CONSTRUCTOR
4446 && bitpos % BITS_PER_UNIT == 0
4447 /* If we have a non-zero bitpos for a register target, then we just
4448 let store_field do the bitfield handling. This is unlikely to
4449 generate unnecessary clear instructions anyways. */
4450 && (bitpos == 0 || GET_CODE (target) == MEM))
4452 if (GET_CODE (target) == MEM)
4454 = adjust_address (target,
4455 GET_MODE (target) == BLKmode
4457 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4458 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4461 /* Update the alias set, if required. */
4462 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4463 && MEM_ALIAS_SET (target) != 0)
4465 target = copy_rtx (target);
4466 set_mem_alias_set (target, alias_set);
4469 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4472 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4476 /* Store the value of constructor EXP into the rtx TARGET.
4477 TARGET is either a REG or a MEM; we know it cannot conflict, since
4478 safe_from_p has been called.
4479 CLEARED is true if TARGET is known to have been zero'd.
4480 SIZE is the number of bytes of TARGET we are allowed to modify: this
4481 may not be the same as the size of EXP if we are assigning to a field
4482 which has been packed to exclude padding bits. */
4485 store_constructor (exp, target, cleared, size)
4491 tree type = TREE_TYPE (exp);
4492 #ifdef WORD_REGISTER_OPERATIONS
4493 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4496 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4497 || TREE_CODE (type) == QUAL_UNION_TYPE)
4501 /* We either clear the aggregate or indicate the value is dead. */
4502 if ((TREE_CODE (type) == UNION_TYPE
4503 || TREE_CODE (type) == QUAL_UNION_TYPE)
4505 && ! CONSTRUCTOR_ELTS (exp))
4506 /* If the constructor is empty, clear the union. */
4508 clear_storage (target, expr_size (exp));
4512 /* If we are building a static constructor into a register,
4513 set the initial value as zero so we can fold the value into
4514 a constant. But if more than one register is involved,
4515 this probably loses. */
4516 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4517 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4519 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4523 /* If the constructor has fewer fields than the structure
4524 or if we are initializing the structure to mostly zeros,
4525 clear the whole structure first. Don't do this if TARGET is a
4526 register whose mode size isn't equal to SIZE since clear_storage
4527 can't handle this case. */
4528 else if (! cleared && size > 0
4529 && ((list_length (CONSTRUCTOR_ELTS (exp))
4530 != fields_length (type))
4531 || mostly_zeros_p (exp))
4532 && (GET_CODE (target) != REG
4533 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4536 clear_storage (target, GEN_INT (size));
4541 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4543 /* Store each element of the constructor into
4544 the corresponding field of TARGET. */
4546 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4548 tree field = TREE_PURPOSE (elt);
4549 #ifdef WORD_REGISTER_OPERATIONS
4550 tree value = TREE_VALUE (elt);
4552 enum machine_mode mode;
4553 HOST_WIDE_INT bitsize;
4554 HOST_WIDE_INT bitpos = 0;
4557 rtx to_rtx = target;
4559 /* Just ignore missing fields.
4560 We cleared the whole structure, above,
4561 if any fields are missing. */
4565 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4568 if (host_integerp (DECL_SIZE (field), 1))
4569 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4573 unsignedp = TREE_UNSIGNED (field);
4574 mode = DECL_MODE (field);
4575 if (DECL_BIT_FIELD (field))
4578 offset = DECL_FIELD_OFFSET (field);
4579 if (host_integerp (offset, 0)
4580 && host_integerp (bit_position (field), 0))
4582 bitpos = int_bit_position (field);
4586 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4592 if (contains_placeholder_p (offset))
4593 offset = build (WITH_RECORD_EXPR, sizetype,
4594 offset, make_tree (TREE_TYPE (exp), target));
4596 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4597 if (GET_CODE (to_rtx) != MEM)
4600 if (GET_MODE (offset_rtx) != ptr_mode)
4601 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4603 #ifdef POINTERS_EXTEND_UNSIGNED
4604 if (GET_MODE (offset_rtx) != Pmode)
4605 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4608 to_rtx = offset_address (to_rtx, offset_rtx,
4609 highest_pow2_factor (offset));
4612 if (TREE_READONLY (field))
4614 if (GET_CODE (to_rtx) == MEM)
4615 to_rtx = copy_rtx (to_rtx);
4617 RTX_UNCHANGING_P (to_rtx) = 1;
4620 #ifdef WORD_REGISTER_OPERATIONS
4621 /* If this initializes a field that is smaller than a word, at the
4622 start of a word, try to widen it to a full word.
4623 This special case allows us to output C++ member function
4624 initializations in a form that the optimizers can understand. */
4625 if (GET_CODE (target) == REG
4626 && bitsize < BITS_PER_WORD
4627 && bitpos % BITS_PER_WORD == 0
4628 && GET_MODE_CLASS (mode) == MODE_INT
4629 && TREE_CODE (value) == INTEGER_CST
4631 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4633 tree type = TREE_TYPE (value);
4635 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4637 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4638 value = convert (type, value);
4641 if (BYTES_BIG_ENDIAN)
4643 = fold (build (LSHIFT_EXPR, type, value,
4644 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4645 bitsize = BITS_PER_WORD;
4650 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4651 && DECL_NONADDRESSABLE_P (field))
4653 to_rtx = copy_rtx (to_rtx);
4654 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4657 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4658 TREE_VALUE (elt), type, cleared,
4659 get_alias_set (TREE_TYPE (field)));
4662 else if (TREE_CODE (type) == ARRAY_TYPE)
4667 tree domain = TYPE_DOMAIN (type);
4668 tree elttype = TREE_TYPE (type);
4669 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4670 && TYPE_MAX_VALUE (domain)
4671 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4672 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4673 HOST_WIDE_INT minelt = 0;
4674 HOST_WIDE_INT maxelt = 0;
4676 /* If we have constant bounds for the range of the type, get them. */
4679 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4680 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4683 /* If the constructor has fewer elements than the array,
4684 clear the whole array first. Similarly if this is
4685 static constructor of a non-BLKmode object. */
4686 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4690 HOST_WIDE_INT count = 0, zero_count = 0;
4691 need_to_clear = ! const_bounds_p;
4693 /* This loop is a more accurate version of the loop in
4694 mostly_zeros_p (it handles RANGE_EXPR in an index).
4695 It is also needed to check for missing elements. */
4696 for (elt = CONSTRUCTOR_ELTS (exp);
4697 elt != NULL_TREE && ! need_to_clear;
4698 elt = TREE_CHAIN (elt))
4700 tree index = TREE_PURPOSE (elt);
4701 HOST_WIDE_INT this_node_count;
4703 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4705 tree lo_index = TREE_OPERAND (index, 0);
4706 tree hi_index = TREE_OPERAND (index, 1);
4708 if (! host_integerp (lo_index, 1)
4709 || ! host_integerp (hi_index, 1))
4715 this_node_count = (tree_low_cst (hi_index, 1)
4716 - tree_low_cst (lo_index, 1) + 1);
4719 this_node_count = 1;
4721 count += this_node_count;
4722 if (mostly_zeros_p (TREE_VALUE (elt)))
4723 zero_count += this_node_count;
4726 /* Clear the entire array first if there are any missing elements,
4727 or if the incidence of zero elements is >= 75%. */
4729 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4733 if (need_to_clear && size > 0)
4736 clear_storage (target, GEN_INT (size));
4739 else if (REG_P (target))
4740 /* Inform later passes that the old value is dead. */
4741 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4743 /* Store each element of the constructor into
4744 the corresponding element of TARGET, determined
4745 by counting the elements. */
4746 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4748 elt = TREE_CHAIN (elt), i++)
4750 enum machine_mode mode;
4751 HOST_WIDE_INT bitsize;
4752 HOST_WIDE_INT bitpos;
4754 tree value = TREE_VALUE (elt);
4755 tree index = TREE_PURPOSE (elt);
4756 rtx xtarget = target;
4758 if (cleared && is_zeros_p (value))
4761 unsignedp = TREE_UNSIGNED (elttype);
4762 mode = TYPE_MODE (elttype);
4763 if (mode == BLKmode)
4764 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4765 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4768 bitsize = GET_MODE_BITSIZE (mode);
4770 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4772 tree lo_index = TREE_OPERAND (index, 0);
4773 tree hi_index = TREE_OPERAND (index, 1);
4774 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
4775 struct nesting *loop;
4776 HOST_WIDE_INT lo, hi, count;
4779 /* If the range is constant and "small", unroll the loop. */
4781 && host_integerp (lo_index, 0)
4782 && host_integerp (hi_index, 0)
4783 && (lo = tree_low_cst (lo_index, 0),
4784 hi = tree_low_cst (hi_index, 0),
4785 count = hi - lo + 1,
4786 (GET_CODE (target) != MEM
4788 || (host_integerp (TYPE_SIZE (elttype), 1)
4789 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4792 lo -= minelt; hi -= minelt;
4793 for (; lo <= hi; lo++)
4795 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4797 if (GET_CODE (target) == MEM
4798 && !MEM_KEEP_ALIAS_SET_P (target)
4799 && TYPE_NONALIASED_COMPONENT (type))
4801 target = copy_rtx (target);
4802 MEM_KEEP_ALIAS_SET_P (target) = 1;
4805 store_constructor_field
4806 (target, bitsize, bitpos, mode, value, type, cleared,
4807 get_alias_set (elttype));
4812 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4813 loop_top = gen_label_rtx ();
4814 loop_end = gen_label_rtx ();
4816 unsignedp = TREE_UNSIGNED (domain);
4818 index = build_decl (VAR_DECL, NULL_TREE, domain);
4821 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4823 SET_DECL_RTL (index, index_r);
4824 if (TREE_CODE (value) == SAVE_EXPR
4825 && SAVE_EXPR_RTL (value) == 0)
4827 /* Make sure value gets expanded once before the
4829 expand_expr (value, const0_rtx, VOIDmode, 0);
4832 store_expr (lo_index, index_r, 0);
4833 loop = expand_start_loop (0);
4835 /* Assign value to element index. */
4837 = convert (ssizetype,
4838 fold (build (MINUS_EXPR, TREE_TYPE (index),
4839 index, TYPE_MIN_VALUE (domain))));
4840 position = size_binop (MULT_EXPR, position,
4842 TYPE_SIZE_UNIT (elttype)));
4844 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4845 xtarget = offset_address (target, pos_rtx,
4846 highest_pow2_factor (position));
4847 xtarget = adjust_address (xtarget, mode, 0);
4848 if (TREE_CODE (value) == CONSTRUCTOR)
4849 store_constructor (value, xtarget, cleared,
4850 bitsize / BITS_PER_UNIT);
4852 store_expr (value, xtarget, 0);
4854 expand_exit_loop_if_false (loop,
4855 build (LT_EXPR, integer_type_node,
4858 expand_increment (build (PREINCREMENT_EXPR,
4860 index, integer_one_node), 0, 0);
4862 emit_label (loop_end);
4865 else if ((index != 0 && ! host_integerp (index, 0))
4866 || ! host_integerp (TYPE_SIZE (elttype), 1))
4871 index = ssize_int (1);
4874 index = convert (ssizetype,
4875 fold (build (MINUS_EXPR, index,
4876 TYPE_MIN_VALUE (domain))));
4878 position = size_binop (MULT_EXPR, index,
4880 TYPE_SIZE_UNIT (elttype)));
4881 xtarget = offset_address (target,
4882 expand_expr (position, 0, VOIDmode, 0),
4883 highest_pow2_factor (position));
4884 xtarget = adjust_address (xtarget, mode, 0);
4885 store_expr (value, xtarget, 0);
4890 bitpos = ((tree_low_cst (index, 0) - minelt)
4891 * tree_low_cst (TYPE_SIZE (elttype), 1));
4893 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4895 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4896 && TYPE_NONALIASED_COMPONENT (type))
4898 target = copy_rtx (target);
4899 MEM_KEEP_ALIAS_SET_P (target) = 1;
4902 store_constructor_field (target, bitsize, bitpos, mode, value,
4903 type, cleared, get_alias_set (elttype));
4909 /* Set constructor assignments. */
4910 else if (TREE_CODE (type) == SET_TYPE)
4912 tree elt = CONSTRUCTOR_ELTS (exp);
4913 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4914 tree domain = TYPE_DOMAIN (type);
4915 tree domain_min, domain_max, bitlength;
4917 /* The default implementation strategy is to extract the constant
4918 parts of the constructor, use that to initialize the target,
4919 and then "or" in whatever non-constant ranges we need in addition.
4921 If a large set is all zero or all ones, it is
4922 probably better to set it using memset (if available) or bzero.
4923 Also, if a large set has just a single range, it may also be
4924 better to first clear all the first clear the set (using
4925 bzero/memset), and set the bits we want. */
4927 /* Check for all zeros. */
4928 if (elt == NULL_TREE && size > 0)
4931 clear_storage (target, GEN_INT (size));
4935 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4936 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4937 bitlength = size_binop (PLUS_EXPR,
4938 size_diffop (domain_max, domain_min),
4941 nbits = tree_low_cst (bitlength, 1);
4943 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4944 are "complicated" (more than one range), initialize (the
4945 constant parts) by copying from a constant. */
4946 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4947 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4949 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4950 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4951 char *bit_buffer = (char *) alloca (nbits);
4952 HOST_WIDE_INT word = 0;
4953 unsigned int bit_pos = 0;
4954 unsigned int ibit = 0;
4955 unsigned int offset = 0; /* In bytes from beginning of set. */
4957 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4960 if (bit_buffer[ibit])
4962 if (BYTES_BIG_ENDIAN)
4963 word |= (1 << (set_word_size - 1 - bit_pos));
4965 word |= 1 << bit_pos;
4969 if (bit_pos >= set_word_size || ibit == nbits)
4971 if (word != 0 || ! cleared)
4973 rtx datum = GEN_INT (word);
4976 /* The assumption here is that it is safe to use
4977 XEXP if the set is multi-word, but not if
4978 it's single-word. */
4979 if (GET_CODE (target) == MEM)
4980 to_rtx = adjust_address (target, mode, offset);
4981 else if (offset == 0)
4985 emit_move_insn (to_rtx, datum);
4992 offset += set_word_size / BITS_PER_UNIT;
4997 /* Don't bother clearing storage if the set is all ones. */
4998 if (TREE_CHAIN (elt) != NULL_TREE
4999 || (TREE_PURPOSE (elt) == NULL_TREE
5001 : ( ! host_integerp (TREE_VALUE (elt), 0)
5002 || ! host_integerp (TREE_PURPOSE (elt), 0)
5003 || (tree_low_cst (TREE_VALUE (elt), 0)
5004 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5005 != (HOST_WIDE_INT) nbits))))
5006 clear_storage (target, expr_size (exp));
5008 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5010 /* Start of range of element or NULL. */
5011 tree startbit = TREE_PURPOSE (elt);
5012 /* End of range of element, or element value. */
5013 tree endbit = TREE_VALUE (elt);
5014 #ifdef TARGET_MEM_FUNCTIONS
5015 HOST_WIDE_INT startb, endb;
5017 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5019 bitlength_rtx = expand_expr (bitlength,
5020 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5022 /* Handle non-range tuple element like [ expr ]. */
5023 if (startbit == NULL_TREE)
5025 startbit = save_expr (endbit);
5029 startbit = convert (sizetype, startbit);
5030 endbit = convert (sizetype, endbit);
5031 if (! integer_zerop (domain_min))
5033 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5034 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5036 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5037 EXPAND_CONST_ADDRESS);
5038 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5039 EXPAND_CONST_ADDRESS);
5045 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5048 emit_move_insn (targetx, target);
5051 else if (GET_CODE (target) == MEM)
5056 #ifdef TARGET_MEM_FUNCTIONS
5057 /* Optimization: If startbit and endbit are
5058 constants divisible by BITS_PER_UNIT,
5059 call memset instead. */
5060 if (TREE_CODE (startbit) == INTEGER_CST
5061 && TREE_CODE (endbit) == INTEGER_CST
5062 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5063 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5065 emit_library_call (memset_libfunc, LCT_NORMAL,
5067 plus_constant (XEXP (targetx, 0),
5068 startb / BITS_PER_UNIT),
5070 constm1_rtx, TYPE_MODE (integer_type_node),
5071 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5072 TYPE_MODE (sizetype));
5076 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5077 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5078 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5079 startbit_rtx, TYPE_MODE (sizetype),
5080 endbit_rtx, TYPE_MODE (sizetype));
5083 emit_move_insn (target, targetx);
5091 /* Store the value of EXP (an expression tree)
5092 into a subfield of TARGET which has mode MODE and occupies
5093 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5094 If MODE is VOIDmode, it means that we are storing into a bit-field.
5096 If VALUE_MODE is VOIDmode, return nothing in particular.
5097 UNSIGNEDP is not used in this case.
5099 Otherwise, return an rtx for the value stored. This rtx
5100 has mode VALUE_MODE if that is convenient to do.
5101 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5103 TYPE is the type of the underlying object,
5105 ALIAS_SET is the alias set for the destination. This value will
5106 (in general) be different from that for TARGET, since TARGET is a
5107 reference to the containing structure. */
5110 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5113 HOST_WIDE_INT bitsize;
5114 HOST_WIDE_INT bitpos;
5115 enum machine_mode mode;
5117 enum machine_mode value_mode;
5122 HOST_WIDE_INT width_mask = 0;
5124 if (TREE_CODE (exp) == ERROR_MARK)
5127 /* If we have nothing to store, do nothing unless the expression has
5130 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5131 else if (bitsize >=0 && bitsize < HOST_BITS_PER_WIDE_INT)
5132 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5134 /* If we are storing into an unaligned field of an aligned union that is
5135 in a register, we may have the mode of TARGET being an integer mode but
5136 MODE == BLKmode. In that case, get an aligned object whose size and
5137 alignment are the same as TARGET and store TARGET into it (we can avoid
5138 the store if the field being stored is the entire width of TARGET). Then
5139 call ourselves recursively to store the field into a BLKmode version of
5140 that object. Finally, load from the object into TARGET. This is not
5141 very efficient in general, but should only be slightly more expensive
5142 than the otherwise-required unaligned accesses. Perhaps this can be
5143 cleaned up later. */
5146 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5150 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5152 rtx blk_object = copy_rtx (object);
5154 PUT_MODE (blk_object, BLKmode);
5156 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5157 emit_move_insn (object, target);
5159 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5162 emit_move_insn (target, object);
5164 /* We want to return the BLKmode version of the data. */
5168 if (GET_CODE (target) == CONCAT)
5170 /* We're storing into a struct containing a single __complex. */
5174 return store_expr (exp, target, 0);
5177 /* If the structure is in a register or if the component
5178 is a bit field, we cannot use addressing to access it.
5179 Use bit-field techniques or SUBREG to store in it. */
5181 if (mode == VOIDmode
5182 || (mode != BLKmode && ! direct_store[(int) mode]
5183 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5184 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5185 || GET_CODE (target) == REG
5186 || GET_CODE (target) == SUBREG
5187 /* If the field isn't aligned enough to store as an ordinary memref,
5188 store it as a bit field. */
5189 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5190 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5191 || bitpos % GET_MODE_ALIGNMENT (mode)))
5192 /* If the RHS and field are a constant size and the size of the
5193 RHS isn't the same size as the bitfield, we must use bitfield
5196 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5197 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5199 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5201 /* If BITSIZE is narrower than the size of the type of EXP
5202 we will be narrowing TEMP. Normally, what's wanted are the
5203 low-order bits. However, if EXP's type is a record and this is
5204 big-endian machine, we want the upper BITSIZE bits. */
5205 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5206 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5207 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5208 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5209 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5213 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5215 if (mode != VOIDmode && mode != BLKmode
5216 && mode != TYPE_MODE (TREE_TYPE (exp)))
5217 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5219 /* If the modes of TARGET and TEMP are both BLKmode, both
5220 must be in memory and BITPOS must be aligned on a byte
5221 boundary. If so, we simply do a block copy. */
5222 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5224 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5225 || bitpos % BITS_PER_UNIT != 0)
5228 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5229 emit_block_move (target, temp,
5230 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5233 return value_mode == VOIDmode ? const0_rtx : target;
5236 /* Store the value in the bitfield. */
5237 store_bit_field (target, bitsize, bitpos, mode, temp,
5238 int_size_in_bytes (type));
5240 if (value_mode != VOIDmode)
5242 /* The caller wants an rtx for the value.
5243 If possible, avoid refetching from the bitfield itself. */
5245 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5248 enum machine_mode tmode;
5251 return expand_and (temp,
5255 GET_MODE (temp) == VOIDmode
5257 : GET_MODE (temp))), NULL_RTX);
5259 tmode = GET_MODE (temp);
5260 if (tmode == VOIDmode)
5262 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5263 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5264 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5267 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5268 NULL_RTX, value_mode, VOIDmode,
5269 int_size_in_bytes (type));
5275 rtx addr = XEXP (target, 0);
5276 rtx to_rtx = target;
5278 /* If a value is wanted, it must be the lhs;
5279 so make the address stable for multiple use. */
5281 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5282 && ! CONSTANT_ADDRESS_P (addr)
5283 /* A frame-pointer reference is already stable. */
5284 && ! (GET_CODE (addr) == PLUS
5285 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5286 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5287 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5288 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5290 /* Now build a reference to just the desired component. */
5292 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5294 if (to_rtx == target)
5295 to_rtx = copy_rtx (to_rtx);
5297 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5298 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5299 set_mem_alias_set (to_rtx, alias_set);
5301 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5305 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5306 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5307 codes and find the ultimate containing object, which we return.
5309 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5310 bit position, and *PUNSIGNEDP to the signedness of the field.
5311 If the position of the field is variable, we store a tree
5312 giving the variable offset (in units) in *POFFSET.
5313 This offset is in addition to the bit position.
5314 If the position is not variable, we store 0 in *POFFSET.
5316 If any of the extraction expressions is volatile,
5317 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5319 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5320 is a mode that can be used to access the field. In that case, *PBITSIZE
5323 If the field describes a variable-sized object, *PMODE is set to
5324 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5325 this case, but the address of the object can be found. */
5328 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5329 punsignedp, pvolatilep)
5331 HOST_WIDE_INT *pbitsize;
5332 HOST_WIDE_INT *pbitpos;
5334 enum machine_mode *pmode;
5339 enum machine_mode mode = VOIDmode;
5340 tree offset = size_zero_node;
5341 tree bit_offset = bitsize_zero_node;
5342 tree placeholder_ptr = 0;
5345 /* First get the mode, signedness, and size. We do this from just the
5346 outermost expression. */
5347 if (TREE_CODE (exp) == COMPONENT_REF)
5349 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5350 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5351 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5353 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5355 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5357 size_tree = TREE_OPERAND (exp, 1);
5358 *punsignedp = TREE_UNSIGNED (exp);
5362 mode = TYPE_MODE (TREE_TYPE (exp));
5363 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5365 if (mode == BLKmode)
5366 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5368 *pbitsize = GET_MODE_BITSIZE (mode);
5373 if (! host_integerp (size_tree, 1))
5374 mode = BLKmode, *pbitsize = -1;
5376 *pbitsize = tree_low_cst (size_tree, 1);
5379 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5380 and find the ultimate containing object. */
5383 if (TREE_CODE (exp) == BIT_FIELD_REF)
5384 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5385 else if (TREE_CODE (exp) == COMPONENT_REF)
5387 tree field = TREE_OPERAND (exp, 1);
5388 tree this_offset = DECL_FIELD_OFFSET (field);
5390 /* If this field hasn't been filled in yet, don't go
5391 past it. This should only happen when folding expressions
5392 made during type construction. */
5393 if (this_offset == 0)
5395 else if (! TREE_CONSTANT (this_offset)
5396 && contains_placeholder_p (this_offset))
5397 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5399 offset = size_binop (PLUS_EXPR, offset, this_offset);
5400 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5401 DECL_FIELD_BIT_OFFSET (field));
5403 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5406 else if (TREE_CODE (exp) == ARRAY_REF
5407 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5409 tree index = TREE_OPERAND (exp, 1);
5410 tree array = TREE_OPERAND (exp, 0);
5411 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5412 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5413 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5415 /* We assume all arrays have sizes that are a multiple of a byte.
5416 First subtract the lower bound, if any, in the type of the
5417 index, then convert to sizetype and multiply by the size of the
5419 if (low_bound != 0 && ! integer_zerop (low_bound))
5420 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5423 /* If the index has a self-referential type, pass it to a
5424 WITH_RECORD_EXPR; if the component size is, pass our
5425 component to one. */
5426 if (! TREE_CONSTANT (index)
5427 && contains_placeholder_p (index))
5428 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5429 if (! TREE_CONSTANT (unit_size)
5430 && contains_placeholder_p (unit_size))
5431 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5433 offset = size_binop (PLUS_EXPR, offset,
5434 size_binop (MULT_EXPR,
5435 convert (sizetype, index),
5439 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5441 tree new = find_placeholder (exp, &placeholder_ptr);
5443 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5444 We might have been called from tree optimization where we
5445 haven't set up an object yet. */
5453 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5454 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5455 && ! ((TREE_CODE (exp) == NOP_EXPR
5456 || TREE_CODE (exp) == CONVERT_EXPR)
5457 && (TYPE_MODE (TREE_TYPE (exp))
5458 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5461 /* If any reference in the chain is volatile, the effect is volatile. */
5462 if (TREE_THIS_VOLATILE (exp))
5465 exp = TREE_OPERAND (exp, 0);
5468 /* If OFFSET is constant, see if we can return the whole thing as a
5469 constant bit position. Otherwise, split it up. */
5470 if (host_integerp (offset, 0)
5471 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5473 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5474 && host_integerp (tem, 0))
5475 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5477 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5483 /* Return 1 if T is an expression that get_inner_reference handles. */
5486 handled_component_p (t)
5489 switch (TREE_CODE (t))
5494 case ARRAY_RANGE_REF:
5495 case NON_LVALUE_EXPR:
5496 case VIEW_CONVERT_EXPR:
5501 return (TYPE_MODE (TREE_TYPE (t))
5502 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5509 /* Subroutine of expand_exp: compute memory_usage from modifier. */
5511 static enum memory_use_mode
5512 get_memory_usage_from_modifier (modifier)
5513 enum expand_modifier modifier;
5519 return MEMORY_USE_RO;
5521 case EXPAND_MEMORY_USE_WO:
5522 return MEMORY_USE_WO;
5524 case EXPAND_MEMORY_USE_RW:
5525 return MEMORY_USE_RW;
5527 case EXPAND_MEMORY_USE_DONT:
5528 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5529 MEMORY_USE_DONT, because they are modifiers to a call of
5530 expand_expr in the ADDR_EXPR case of expand_expr. */
5531 case EXPAND_CONST_ADDRESS:
5532 case EXPAND_INITIALIZER:
5533 return MEMORY_USE_DONT;
5534 case EXPAND_MEMORY_USE_BAD:
5540 /* Given an rtx VALUE that may contain additions and multiplications, return
5541 an equivalent value that just refers to a register, memory, or constant.
5542 This is done by generating instructions to perform the arithmetic and
5543 returning a pseudo-register containing the value.
5545 The returned value may be a REG, SUBREG, MEM or constant. */
5548 force_operand (value, target)
5552 /* Use a temporary to force order of execution of calls to
5556 /* Use subtarget as the target for operand 0 of a binary operation. */
5557 rtx subtarget = get_subtarget (target);
5559 /* Check for a PIC address load. */
5561 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5562 && XEXP (value, 0) == pic_offset_table_rtx
5563 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5564 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5565 || GET_CODE (XEXP (value, 1)) == CONST))
5568 subtarget = gen_reg_rtx (GET_MODE (value));
5569 emit_move_insn (subtarget, value);
5573 if (GET_CODE (value) == PLUS)
5574 binoptab = add_optab;
5575 else if (GET_CODE (value) == MINUS)
5576 binoptab = sub_optab;
5577 else if (GET_CODE (value) == MULT)
5579 op2 = XEXP (value, 1);
5580 if (!CONSTANT_P (op2)
5581 && !(GET_CODE (op2) == REG && op2 != subtarget))
5583 tmp = force_operand (XEXP (value, 0), subtarget);
5584 return expand_mult (GET_MODE (value), tmp,
5585 force_operand (op2, NULL_RTX),
5591 op2 = XEXP (value, 1);
5592 if (!CONSTANT_P (op2)
5593 && !(GET_CODE (op2) == REG && op2 != subtarget))
5595 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5597 binoptab = add_optab;
5598 op2 = negate_rtx (GET_MODE (value), op2);
5601 /* Check for an addition with OP2 a constant integer and our first
5602 operand a PLUS of a virtual register and something else. In that
5603 case, we want to emit the sum of the virtual register and the
5604 constant first and then add the other value. This allows virtual
5605 register instantiation to simply modify the constant rather than
5606 creating another one around this addition. */
5607 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5608 && GET_CODE (XEXP (value, 0)) == PLUS
5609 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5610 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5611 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5613 rtx temp = expand_binop (GET_MODE (value), binoptab,
5614 XEXP (XEXP (value, 0), 0), op2,
5615 subtarget, 0, OPTAB_LIB_WIDEN);
5616 return expand_binop (GET_MODE (value), binoptab, temp,
5617 force_operand (XEXP (XEXP (value, 0), 1), 0),
5618 target, 0, OPTAB_LIB_WIDEN);
5621 tmp = force_operand (XEXP (value, 0), subtarget);
5622 return expand_binop (GET_MODE (value), binoptab, tmp,
5623 force_operand (op2, NULL_RTX),
5624 target, 0, OPTAB_LIB_WIDEN);
5625 /* We give UNSIGNEDP = 0 to expand_binop
5626 because the only operations we are expanding here are signed ones. */
5629 #ifdef INSN_SCHEDULING
5630 /* On machines that have insn scheduling, we want all memory reference to be
5631 explicit, so we need to deal with such paradoxical SUBREGs. */
5632 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5633 && (GET_MODE_SIZE (GET_MODE (value))
5634 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5636 = simplify_gen_subreg (GET_MODE (value),
5637 force_reg (GET_MODE (SUBREG_REG (value)),
5638 force_operand (SUBREG_REG (value),
5640 GET_MODE (SUBREG_REG (value)),
5641 SUBREG_BYTE (value));
5647 /* Subroutine of expand_expr: return nonzero iff there is no way that
5648 EXP can reference X, which is being modified. TOP_P is nonzero if this
5649 call is going to be used to determine whether we need a temporary
5650 for EXP, as opposed to a recursive call to this function.
5652 It is always safe for this routine to return zero since it merely
5653 searches for optimization opportunities. */
5656 safe_from_p (x, exp, top_p)
5663 static tree save_expr_list;
5666 /* If EXP has varying size, we MUST use a target since we currently
5667 have no way of allocating temporaries of variable size
5668 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5669 So we assume here that something at a higher level has prevented a
5670 clash. This is somewhat bogus, but the best we can do. Only
5671 do this when X is BLKmode and when we are at the top level. */
5672 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5673 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5674 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5675 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5676 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5678 && GET_MODE (x) == BLKmode)
5679 /* If X is in the outgoing argument area, it is always safe. */
5680 || (GET_CODE (x) == MEM
5681 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5682 || (GET_CODE (XEXP (x, 0)) == PLUS
5683 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5686 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5687 find the underlying pseudo. */
5688 if (GET_CODE (x) == SUBREG)
5691 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5695 /* A SAVE_EXPR might appear many times in the expression passed to the
5696 top-level safe_from_p call, and if it has a complex subexpression,
5697 examining it multiple times could result in a combinatorial explosion.
5698 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5699 with optimization took about 28 minutes to compile -- even though it was
5700 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5701 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5702 we have processed. Note that the only test of top_p was above. */
5711 rtn = safe_from_p (x, exp, 0);
5713 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5714 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5719 /* Now look at our tree code and possibly recurse. */
5720 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5723 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5730 if (TREE_CODE (exp) == TREE_LIST)
5731 return ((TREE_VALUE (exp) == 0
5732 || safe_from_p (x, TREE_VALUE (exp), 0))
5733 && (TREE_CHAIN (exp) == 0
5734 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5735 else if (TREE_CODE (exp) == ERROR_MARK)
5736 return 1; /* An already-visited SAVE_EXPR? */
5741 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5745 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5746 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5750 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5751 the expression. If it is set, we conflict iff we are that rtx or
5752 both are in memory. Otherwise, we check all operands of the
5753 expression recursively. */
5755 switch (TREE_CODE (exp))
5758 /* If the operand is static or we are static, we can't conflict.
5759 Likewise if we don't conflict with the operand at all. */
5760 if (staticp (TREE_OPERAND (exp, 0))
5761 || TREE_STATIC (exp)
5762 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5765 /* Otherwise, the only way this can conflict is if we are taking
5766 the address of a DECL a that address if part of X, which is
5768 exp = TREE_OPERAND (exp, 0);
5771 if (!DECL_RTL_SET_P (exp)
5772 || GET_CODE (DECL_RTL (exp)) != MEM)
5775 exp_rtl = XEXP (DECL_RTL (exp), 0);
5780 if (GET_CODE (x) == MEM
5781 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5782 get_alias_set (exp)))
5787 /* Assume that the call will clobber all hard registers and
5789 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5790 || GET_CODE (x) == MEM)
5795 /* If a sequence exists, we would have to scan every instruction
5796 in the sequence to see if it was safe. This is probably not
5798 if (RTL_EXPR_SEQUENCE (exp))
5801 exp_rtl = RTL_EXPR_RTL (exp);
5804 case WITH_CLEANUP_EXPR:
5805 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5808 case CLEANUP_POINT_EXPR:
5809 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5812 exp_rtl = SAVE_EXPR_RTL (exp);
5816 /* If we've already scanned this, don't do it again. Otherwise,
5817 show we've scanned it and record for clearing the flag if we're
5819 if (TREE_PRIVATE (exp))
5822 TREE_PRIVATE (exp) = 1;
5823 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5825 TREE_PRIVATE (exp) = 0;
5829 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5833 /* The only operand we look at is operand 1. The rest aren't
5834 part of the expression. */
5835 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5837 case METHOD_CALL_EXPR:
5838 /* This takes an rtx argument, but shouldn't appear here. */
5845 /* If we have an rtx, we do not need to scan our operands. */
5849 nops = first_rtl_op (TREE_CODE (exp));
5850 for (i = 0; i < nops; i++)
5851 if (TREE_OPERAND (exp, i) != 0
5852 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5855 /* If this is a language-specific tree code, it may require
5856 special handling. */
5857 if ((unsigned int) TREE_CODE (exp)
5858 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5860 && !(*lang_safe_from_p) (x, exp))
5864 /* If we have an rtl, find any enclosed object. Then see if we conflict
5868 if (GET_CODE (exp_rtl) == SUBREG)
5870 exp_rtl = SUBREG_REG (exp_rtl);
5871 if (GET_CODE (exp_rtl) == REG
5872 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5876 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5877 are memory and they conflict. */
5878 return ! (rtx_equal_p (x, exp_rtl)
5879 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5880 && true_dependence (exp_rtl, GET_MODE (x), x,
5881 rtx_addr_varies_p)));
5884 /* If we reach here, it is safe. */
5888 /* Subroutine of expand_expr: return rtx if EXP is a
5889 variable or parameter; else return 0. */
5896 switch (TREE_CODE (exp))
5900 return DECL_RTL (exp);
5906 #ifdef MAX_INTEGER_COMPUTATION_MODE
5909 check_max_integer_computation_mode (exp)
5912 enum tree_code code;
5913 enum machine_mode mode;
5915 /* Strip any NOPs that don't change the mode. */
5917 code = TREE_CODE (exp);
5919 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5920 if (code == NOP_EXPR
5921 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5924 /* First check the type of the overall operation. We need only look at
5925 unary, binary and relational operations. */
5926 if (TREE_CODE_CLASS (code) == '1'
5927 || TREE_CODE_CLASS (code) == '2'
5928 || TREE_CODE_CLASS (code) == '<')
5930 mode = TYPE_MODE (TREE_TYPE (exp));
5931 if (GET_MODE_CLASS (mode) == MODE_INT
5932 && mode > MAX_INTEGER_COMPUTATION_MODE)
5933 internal_error ("unsupported wide integer operation");
5936 /* Check operand of a unary op. */
5937 if (TREE_CODE_CLASS (code) == '1')
5939 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5940 if (GET_MODE_CLASS (mode) == MODE_INT
5941 && mode > MAX_INTEGER_COMPUTATION_MODE)
5942 internal_error ("unsupported wide integer operation");
5945 /* Check operands of a binary/comparison op. */
5946 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5948 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5949 if (GET_MODE_CLASS (mode) == MODE_INT
5950 && mode > MAX_INTEGER_COMPUTATION_MODE)
5951 internal_error ("unsupported wide integer operation");
5953 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5954 if (GET_MODE_CLASS (mode) == MODE_INT
5955 && mode > MAX_INTEGER_COMPUTATION_MODE)
5956 internal_error ("unsupported wide integer operation");
5961 /* Return the highest power of two that EXP is known to be a multiple of.
5962 This is used in updating alignment of MEMs in array references. */
5964 static HOST_WIDE_INT
5965 highest_pow2_factor (exp)
5968 HOST_WIDE_INT c0, c1;
5970 switch (TREE_CODE (exp))
5973 /* If the integer is expressable in a HOST_WIDE_INT, we can find the
5974 lowest bit that's a one. If the result is zero, pessimize by
5975 returning 1. This is overly-conservative, but such things should not
5976 happen in the offset expressions that we are called with. */
5977 if (host_integerp (exp, 0))
5979 c0 = tree_low_cst (exp, 0);
5980 c0 = c0 < 0 ? - c0 : c0;
5981 return c0 != 0 ? c0 & -c0 : 1;
5985 case PLUS_EXPR: case MINUS_EXPR:
5986 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5987 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5988 return MIN (c0, c1);
5991 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5992 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5995 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
5997 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5998 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5999 return MAX (1, c0 / c1);
6001 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6002 case COMPOUND_EXPR: case SAVE_EXPR: case WITH_RECORD_EXPR:
6003 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6006 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6007 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6008 return MIN (c0, c1);
6017 /* Return an object on the placeholder list that matches EXP, a
6018 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6019 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6020 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6021 is a location which initially points to a starting location in the
6022 placeholder list (zero means start of the list) and where a pointer into
6023 the placeholder list at which the object is found is placed. */
6026 find_placeholder (exp, plist)
6030 tree type = TREE_TYPE (exp);
6031 tree placeholder_expr;
6033 for (placeholder_expr
6034 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6035 placeholder_expr != 0;
6036 placeholder_expr = TREE_CHAIN (placeholder_expr))
6038 tree need_type = TYPE_MAIN_VARIANT (type);
6041 /* Find the outermost reference that is of the type we want. If none,
6042 see if any object has a type that is a pointer to the type we
6044 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6045 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6046 || TREE_CODE (elt) == COND_EXPR)
6047 ? TREE_OPERAND (elt, 1)
6048 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6049 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6050 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6051 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6052 ? TREE_OPERAND (elt, 0) : 0))
6053 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6056 *plist = placeholder_expr;
6060 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6062 = ((TREE_CODE (elt) == COMPOUND_EXPR
6063 || TREE_CODE (elt) == COND_EXPR)
6064 ? TREE_OPERAND (elt, 1)
6065 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6066 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6067 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6068 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6069 ? TREE_OPERAND (elt, 0) : 0))
6070 if (POINTER_TYPE_P (TREE_TYPE (elt))
6071 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6075 *plist = placeholder_expr;
6076 return build1 (INDIRECT_REF, need_type, elt);
6083 /* expand_expr: generate code for computing expression EXP.
6084 An rtx for the computed value is returned. The value is never null.
6085 In the case of a void EXP, const0_rtx is returned.
6087 The value may be stored in TARGET if TARGET is nonzero.
6088 TARGET is just a suggestion; callers must assume that
6089 the rtx returned may not be the same as TARGET.
6091 If TARGET is CONST0_RTX, it means that the value will be ignored.
6093 If TMODE is not VOIDmode, it suggests generating the
6094 result in mode TMODE. But this is done only when convenient.
6095 Otherwise, TMODE is ignored and the value generated in its natural mode.
6096 TMODE is just a suggestion; callers must assume that
6097 the rtx returned may not have mode TMODE.
6099 Note that TARGET may have neither TMODE nor MODE. In that case, it
6100 probably will not be used.
6102 If MODIFIER is EXPAND_SUM then when EXP is an addition
6103 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6104 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6105 products as above, or REG or MEM, or constant.
6106 Ordinarily in such cases we would output mul or add instructions
6107 and then return a pseudo reg containing the sum.
6109 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6110 it also marks a label as absolutely required (it can't be dead).
6111 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6112 This is used for outputting expressions used in initializers.
6114 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6115 with a constant address even if that address is not normally legitimate.
6116 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6119 expand_expr (exp, target, tmode, modifier)
6122 enum machine_mode tmode;
6123 enum expand_modifier modifier;
6126 tree type = TREE_TYPE (exp);
6127 int unsignedp = TREE_UNSIGNED (type);
6128 enum machine_mode mode;
6129 enum tree_code code = TREE_CODE (exp);
6131 rtx subtarget, original_target;
6134 /* Used by check-memory-usage to make modifier read only. */
6135 enum expand_modifier ro_modifier;
6137 /* Handle ERROR_MARK before anybody tries to access its type. */
6138 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6140 op0 = CONST0_RTX (tmode);
6146 mode = TYPE_MODE (type);
6147 /* Use subtarget as the target for operand 0 of a binary operation. */
6148 subtarget = get_subtarget (target);
6149 original_target = target;
6150 ignore = (target == const0_rtx
6151 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6152 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6153 || code == COND_EXPR)
6154 && TREE_CODE (type) == VOID_TYPE));
6156 /* Make a read-only version of the modifier. */
6157 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6158 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6159 ro_modifier = modifier;
6161 ro_modifier = EXPAND_NORMAL;
6163 /* If we are going to ignore this result, we need only do something
6164 if there is a side-effect somewhere in the expression. If there
6165 is, short-circuit the most common cases here. Note that we must
6166 not call expand_expr with anything but const0_rtx in case this
6167 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6171 if (! TREE_SIDE_EFFECTS (exp))
6174 /* Ensure we reference a volatile object even if value is ignored, but
6175 don't do this if all we are doing is taking its address. */
6176 if (TREE_THIS_VOLATILE (exp)
6177 && TREE_CODE (exp) != FUNCTION_DECL
6178 && mode != VOIDmode && mode != BLKmode
6179 && modifier != EXPAND_CONST_ADDRESS)
6181 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6182 if (GET_CODE (temp) == MEM)
6183 temp = copy_to_reg (temp);
6187 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6188 || code == INDIRECT_REF || code == BUFFER_REF)
6189 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6190 VOIDmode, ro_modifier);
6191 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6192 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6194 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6196 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6200 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6201 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6202 /* If the second operand has no side effects, just evaluate
6204 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6205 VOIDmode, ro_modifier);
6206 else if (code == BIT_FIELD_REF)
6208 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6210 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6212 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6220 #ifdef MAX_INTEGER_COMPUTATION_MODE
6221 /* Only check stuff here if the mode we want is different from the mode
6222 of the expression; if it's the same, check_max_integer_computiation_mode
6223 will handle it. Do we really need to check this stuff at all? */
6226 && GET_MODE (target) != mode
6227 && TREE_CODE (exp) != INTEGER_CST
6228 && TREE_CODE (exp) != PARM_DECL
6229 && TREE_CODE (exp) != ARRAY_REF
6230 && TREE_CODE (exp) != ARRAY_RANGE_REF
6231 && TREE_CODE (exp) != COMPONENT_REF
6232 && TREE_CODE (exp) != BIT_FIELD_REF
6233 && TREE_CODE (exp) != INDIRECT_REF
6234 && TREE_CODE (exp) != CALL_EXPR
6235 && TREE_CODE (exp) != VAR_DECL
6236 && TREE_CODE (exp) != RTL_EXPR)
6238 enum machine_mode mode = GET_MODE (target);
6240 if (GET_MODE_CLASS (mode) == MODE_INT
6241 && mode > MAX_INTEGER_COMPUTATION_MODE)
6242 internal_error ("unsupported wide integer operation");
6246 && TREE_CODE (exp) != INTEGER_CST
6247 && TREE_CODE (exp) != PARM_DECL
6248 && TREE_CODE (exp) != ARRAY_REF
6249 && TREE_CODE (exp) != ARRAY_RANGE_REF
6250 && TREE_CODE (exp) != COMPONENT_REF
6251 && TREE_CODE (exp) != BIT_FIELD_REF
6252 && TREE_CODE (exp) != INDIRECT_REF
6253 && TREE_CODE (exp) != VAR_DECL
6254 && TREE_CODE (exp) != CALL_EXPR
6255 && TREE_CODE (exp) != RTL_EXPR
6256 && GET_MODE_CLASS (tmode) == MODE_INT
6257 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6258 internal_error ("unsupported wide integer operation");
6260 check_max_integer_computation_mode (exp);
6263 /* If will do cse, generate all results into pseudo registers
6264 since 1) that allows cse to find more things
6265 and 2) otherwise cse could produce an insn the machine
6268 if (! cse_not_expected && mode != BLKmode && target
6269 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6276 tree function = decl_function_context (exp);
6277 /* Handle using a label in a containing function. */
6278 if (function != current_function_decl
6279 && function != inline_function_decl && function != 0)
6281 struct function *p = find_function_data (function);
6282 p->expr->x_forced_labels
6283 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6284 p->expr->x_forced_labels);
6288 if (modifier == EXPAND_INITIALIZER)
6289 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6294 temp = gen_rtx_MEM (FUNCTION_MODE,
6295 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6296 if (function != current_function_decl
6297 && function != inline_function_decl && function != 0)
6298 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6303 if (DECL_RTL (exp) == 0)
6305 error_with_decl (exp, "prior parameter's size depends on `%s'");
6306 return CONST0_RTX (mode);
6309 /* ... fall through ... */
6312 /* If a static var's type was incomplete when the decl was written,
6313 but the type is complete now, lay out the decl now. */
6314 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6315 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6317 rtx value = DECL_RTL_IF_SET (exp);
6319 layout_decl (exp, 0);
6321 /* If the RTL was already set, update its mode and memory
6325 PUT_MODE (value, DECL_MODE (exp));
6326 SET_DECL_RTL (exp, 0);
6327 set_mem_attributes (value, exp, 1);
6328 SET_DECL_RTL (exp, value);
6332 /* Although static-storage variables start off initialized, according to
6333 ANSI C, a memcpy could overwrite them with uninitialized values. So
6334 we check them too. This also lets us check for read-only variables
6335 accessed via a non-const declaration, in case it won't be detected
6336 any other way (e.g., in an embedded system or OS kernel without
6339 Aggregates are not checked here; they're handled elsewhere. */
6340 if (cfun && current_function_check_memory_usage
6342 && GET_CODE (DECL_RTL (exp)) == MEM
6343 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6345 enum memory_use_mode memory_usage;
6346 memory_usage = get_memory_usage_from_modifier (modifier);
6348 in_check_memory_usage = 1;
6349 if (memory_usage != MEMORY_USE_DONT)
6350 emit_library_call (chkr_check_addr_libfunc,
6351 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6352 XEXP (DECL_RTL (exp), 0), Pmode,
6353 GEN_INT (int_size_in_bytes (type)),
6354 TYPE_MODE (sizetype),
6355 GEN_INT (memory_usage),
6356 TYPE_MODE (integer_type_node));
6357 in_check_memory_usage = 0;
6360 /* ... fall through ... */
6364 if (DECL_RTL (exp) == 0)
6367 /* Ensure variable marked as used even if it doesn't go through
6368 a parser. If it hasn't be used yet, write out an external
6370 if (! TREE_USED (exp))
6372 assemble_external (exp);
6373 TREE_USED (exp) = 1;
6376 /* Show we haven't gotten RTL for this yet. */
6379 /* Handle variables inherited from containing functions. */
6380 context = decl_function_context (exp);
6382 /* We treat inline_function_decl as an alias for the current function
6383 because that is the inline function whose vars, types, etc.
6384 are being merged into the current function.
6385 See expand_inline_function. */
6387 if (context != 0 && context != current_function_decl
6388 && context != inline_function_decl
6389 /* If var is static, we don't need a static chain to access it. */
6390 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6391 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6395 /* Mark as non-local and addressable. */
6396 DECL_NONLOCAL (exp) = 1;
6397 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6399 mark_addressable (exp);
6400 if (GET_CODE (DECL_RTL (exp)) != MEM)
6402 addr = XEXP (DECL_RTL (exp), 0);
6403 if (GET_CODE (addr) == MEM)
6405 = replace_equiv_address (addr,
6406 fix_lexical_addr (XEXP (addr, 0), exp));
6408 addr = fix_lexical_addr (addr, exp);
6410 temp = replace_equiv_address (DECL_RTL (exp), addr);
6413 /* This is the case of an array whose size is to be determined
6414 from its initializer, while the initializer is still being parsed.
6417 else if (GET_CODE (DECL_RTL (exp)) == MEM
6418 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6419 temp = validize_mem (DECL_RTL (exp));
6421 /* If DECL_RTL is memory, we are in the normal case and either
6422 the address is not valid or it is not a register and -fforce-addr
6423 is specified, get the address into a register. */
6425 else if (GET_CODE (DECL_RTL (exp)) == MEM
6426 && modifier != EXPAND_CONST_ADDRESS
6427 && modifier != EXPAND_SUM
6428 && modifier != EXPAND_INITIALIZER
6429 && (! memory_address_p (DECL_MODE (exp),
6430 XEXP (DECL_RTL (exp), 0))
6432 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6433 temp = replace_equiv_address (DECL_RTL (exp),
6434 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6436 /* If we got something, return it. But first, set the alignment
6437 if the address is a register. */
6440 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6441 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6446 /* If the mode of DECL_RTL does not match that of the decl, it
6447 must be a promoted value. We return a SUBREG of the wanted mode,
6448 but mark it so that we know that it was already extended. */
6450 if (GET_CODE (DECL_RTL (exp)) == REG
6451 && GET_MODE (DECL_RTL (exp)) != mode)
6453 /* Get the signedness used for this variable. Ensure we get the
6454 same mode we got when the variable was declared. */
6455 if (GET_MODE (DECL_RTL (exp))
6456 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6459 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6460 SUBREG_PROMOTED_VAR_P (temp) = 1;
6461 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6465 return DECL_RTL (exp);
6468 return immed_double_const (TREE_INT_CST_LOW (exp),
6469 TREE_INT_CST_HIGH (exp), mode);
6472 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6473 EXPAND_MEMORY_USE_BAD);
6476 /* If optimized, generate immediate CONST_DOUBLE
6477 which will be turned into memory by reload if necessary.
6479 We used to force a register so that loop.c could see it. But
6480 this does not allow gen_* patterns to perform optimizations with
6481 the constants. It also produces two insns in cases like "x = 1.0;".
6482 On most machines, floating-point constants are not permitted in
6483 many insns, so we'd end up copying it to a register in any case.
6485 Now, we do the copying in expand_binop, if appropriate. */
6486 return immed_real_const (exp);
6490 if (! TREE_CST_RTL (exp))
6491 output_constant_def (exp, 1);
6493 /* TREE_CST_RTL probably contains a constant address.
6494 On RISC machines where a constant address isn't valid,
6495 make some insns to get that address into a register. */
6496 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6497 && modifier != EXPAND_CONST_ADDRESS
6498 && modifier != EXPAND_INITIALIZER
6499 && modifier != EXPAND_SUM
6500 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6502 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6503 return replace_equiv_address (TREE_CST_RTL (exp),
6504 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6505 return TREE_CST_RTL (exp);
6507 case EXPR_WITH_FILE_LOCATION:
6510 const char *saved_input_filename = input_filename;
6511 int saved_lineno = lineno;
6512 input_filename = EXPR_WFL_FILENAME (exp);
6513 lineno = EXPR_WFL_LINENO (exp);
6514 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6515 emit_line_note (input_filename, lineno);
6516 /* Possibly avoid switching back and forth here. */
6517 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6518 input_filename = saved_input_filename;
6519 lineno = saved_lineno;
6524 context = decl_function_context (exp);
6526 /* If this SAVE_EXPR was at global context, assume we are an
6527 initialization function and move it into our context. */
6529 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6531 /* We treat inline_function_decl as an alias for the current function
6532 because that is the inline function whose vars, types, etc.
6533 are being merged into the current function.
6534 See expand_inline_function. */
6535 if (context == current_function_decl || context == inline_function_decl)
6538 /* If this is non-local, handle it. */
6541 /* The following call just exists to abort if the context is
6542 not of a containing function. */
6543 find_function_data (context);
6545 temp = SAVE_EXPR_RTL (exp);
6546 if (temp && GET_CODE (temp) == REG)
6548 put_var_into_stack (exp);
6549 temp = SAVE_EXPR_RTL (exp);
6551 if (temp == 0 || GET_CODE (temp) != MEM)
6554 replace_equiv_address (temp,
6555 fix_lexical_addr (XEXP (temp, 0), exp));
6557 if (SAVE_EXPR_RTL (exp) == 0)
6559 if (mode == VOIDmode)
6562 temp = assign_temp (build_qualified_type (type,
6564 | TYPE_QUAL_CONST)),
6567 SAVE_EXPR_RTL (exp) = temp;
6568 if (!optimize && GET_CODE (temp) == REG)
6569 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6572 /* If the mode of TEMP does not match that of the expression, it
6573 must be a promoted value. We pass store_expr a SUBREG of the
6574 wanted mode but mark it so that we know that it was already
6575 extended. Note that `unsignedp' was modified above in
6578 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6580 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6581 SUBREG_PROMOTED_VAR_P (temp) = 1;
6582 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6585 if (temp == const0_rtx)
6586 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6587 EXPAND_MEMORY_USE_BAD);
6589 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6591 TREE_USED (exp) = 1;
6594 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6595 must be a promoted value. We return a SUBREG of the wanted mode,
6596 but mark it so that we know that it was already extended. */
6598 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6599 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6601 /* Compute the signedness and make the proper SUBREG. */
6602 promote_mode (type, mode, &unsignedp, 0);
6603 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6604 SUBREG_PROMOTED_VAR_P (temp) = 1;
6605 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6609 return SAVE_EXPR_RTL (exp);
6614 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6615 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6619 case PLACEHOLDER_EXPR:
6621 tree old_list = placeholder_list;
6622 tree placeholder_expr = 0;
6624 exp = find_placeholder (exp, &placeholder_expr);
6628 placeholder_list = TREE_CHAIN (placeholder_expr);
6629 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6630 placeholder_list = old_list;
6634 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6637 case WITH_RECORD_EXPR:
6638 /* Put the object on the placeholder list, expand our first operand,
6639 and pop the list. */
6640 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6642 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6643 tmode, ro_modifier);
6644 placeholder_list = TREE_CHAIN (placeholder_list);
6648 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6649 expand_goto (TREE_OPERAND (exp, 0));
6651 expand_computed_goto (TREE_OPERAND (exp, 0));
6655 expand_exit_loop_if_false (NULL,
6656 invert_truthvalue (TREE_OPERAND (exp, 0)));
6659 case LABELED_BLOCK_EXPR:
6660 if (LABELED_BLOCK_BODY (exp))
6661 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6662 /* Should perhaps use expand_label, but this is simpler and safer. */
6663 do_pending_stack_adjust ();
6664 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6667 case EXIT_BLOCK_EXPR:
6668 if (EXIT_BLOCK_RETURN (exp))
6669 sorry ("returned value in block_exit_expr");
6670 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6675 expand_start_loop (1);
6676 expand_expr_stmt (TREE_OPERAND (exp, 0));
6684 tree vars = TREE_OPERAND (exp, 0);
6685 int vars_need_expansion = 0;
6687 /* Need to open a binding contour here because
6688 if there are any cleanups they must be contained here. */
6689 expand_start_bindings (2);
6691 /* Mark the corresponding BLOCK for output in its proper place. */
6692 if (TREE_OPERAND (exp, 2) != 0
6693 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6694 insert_block (TREE_OPERAND (exp, 2));
6696 /* If VARS have not yet been expanded, expand them now. */
6699 if (!DECL_RTL_SET_P (vars))
6701 vars_need_expansion = 1;
6704 expand_decl_init (vars);
6705 vars = TREE_CHAIN (vars);
6708 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6710 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6716 if (RTL_EXPR_SEQUENCE (exp))
6718 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6720 emit_insns (RTL_EXPR_SEQUENCE (exp));
6721 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6723 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6724 free_temps_for_rtl_expr (exp);
6725 return RTL_EXPR_RTL (exp);
6728 /* If we don't need the result, just ensure we evaluate any
6733 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6734 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6735 EXPAND_MEMORY_USE_BAD);
6739 /* All elts simple constants => refer to a constant in memory. But
6740 if this is a non-BLKmode mode, let it store a field at a time
6741 since that should make a CONST_INT or CONST_DOUBLE when we
6742 fold. Likewise, if we have a target we can use, it is best to
6743 store directly into the target unless the type is large enough
6744 that memcpy will be used. If we are making an initializer and
6745 all operands are constant, put it in memory as well. */
6746 else if ((TREE_STATIC (exp)
6747 && ((mode == BLKmode
6748 && ! (target != 0 && safe_from_p (target, exp, 1)))
6749 || TREE_ADDRESSABLE (exp)
6750 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6751 && (! MOVE_BY_PIECES_P
6752 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6754 && ! mostly_zeros_p (exp))))
6755 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6757 rtx constructor = output_constant_def (exp, 1);
6759 if (modifier != EXPAND_CONST_ADDRESS
6760 && modifier != EXPAND_INITIALIZER
6761 && modifier != EXPAND_SUM)
6762 constructor = validize_mem (constructor);
6768 /* Handle calls that pass values in multiple non-contiguous
6769 locations. The Irix 6 ABI has examples of this. */
6770 if (target == 0 || ! safe_from_p (target, exp, 1)
6771 || GET_CODE (target) == PARALLEL)
6773 = assign_temp (build_qualified_type (type,
6775 | (TREE_READONLY (exp)
6776 * TYPE_QUAL_CONST))),
6777 TREE_ADDRESSABLE (exp), 1, 1);
6779 store_constructor (exp, target, 0,
6780 int_size_in_bytes (TREE_TYPE (exp)));
6786 tree exp1 = TREE_OPERAND (exp, 0);
6788 tree string = string_constant (exp1, &index);
6790 /* Try to optimize reads from const strings. */
6792 && TREE_CODE (string) == STRING_CST
6793 && TREE_CODE (index) == INTEGER_CST
6794 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6795 && GET_MODE_CLASS (mode) == MODE_INT
6796 && GET_MODE_SIZE (mode) == 1
6797 && modifier != EXPAND_MEMORY_USE_WO)
6799 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6801 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6802 op0 = memory_address (mode, op0);
6804 if (cfun && current_function_check_memory_usage
6805 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6807 enum memory_use_mode memory_usage;
6808 memory_usage = get_memory_usage_from_modifier (modifier);
6810 if (memory_usage != MEMORY_USE_DONT)
6812 in_check_memory_usage = 1;
6813 emit_library_call (chkr_check_addr_libfunc,
6814 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6815 Pmode, GEN_INT (int_size_in_bytes (type)),
6816 TYPE_MODE (sizetype),
6817 GEN_INT (memory_usage),
6818 TYPE_MODE (integer_type_node));
6819 in_check_memory_usage = 0;
6823 temp = gen_rtx_MEM (mode, op0);
6824 set_mem_attributes (temp, exp, 0);
6826 /* If we are writing to this object and its type is a record with
6827 readonly fields, we must mark it as readonly so it will
6828 conflict with readonly references to those fields. */
6829 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6830 RTX_UNCHANGING_P (temp) = 1;
6836 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6840 tree array = TREE_OPERAND (exp, 0);
6841 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6842 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6843 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6846 /* Optimize the special-case of a zero lower bound.
6848 We convert the low_bound to sizetype to avoid some problems
6849 with constant folding. (E.g. suppose the lower bound is 1,
6850 and its mode is QI. Without the conversion, (ARRAY
6851 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6852 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6854 if (! integer_zerop (low_bound))
6855 index = size_diffop (index, convert (sizetype, low_bound));
6857 /* Fold an expression like: "foo"[2].
6858 This is not done in fold so it won't happen inside &.
6859 Don't fold if this is for wide characters since it's too
6860 difficult to do correctly and this is a very rare case. */
6862 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6863 && TREE_CODE (array) == STRING_CST
6864 && TREE_CODE (index) == INTEGER_CST
6865 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6866 && GET_MODE_CLASS (mode) == MODE_INT
6867 && GET_MODE_SIZE (mode) == 1)
6869 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6871 /* If this is a constant index into a constant array,
6872 just get the value from the array. Handle both the cases when
6873 we have an explicit constructor and when our operand is a variable
6874 that was declared const. */
6876 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6877 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6878 && TREE_CODE (index) == INTEGER_CST
6879 && 0 > compare_tree_int (index,
6880 list_length (CONSTRUCTOR_ELTS
6881 (TREE_OPERAND (exp, 0)))))
6885 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6886 i = TREE_INT_CST_LOW (index);
6887 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6891 return expand_expr (fold (TREE_VALUE (elem)), target,
6892 tmode, ro_modifier);
6895 else if (optimize >= 1
6896 && modifier != EXPAND_CONST_ADDRESS
6897 && modifier != EXPAND_INITIALIZER
6898 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6899 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6900 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6902 if (TREE_CODE (index) == INTEGER_CST)
6904 tree init = DECL_INITIAL (array);
6906 if (TREE_CODE (init) == CONSTRUCTOR)
6910 for (elem = CONSTRUCTOR_ELTS (init);
6912 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6913 elem = TREE_CHAIN (elem))
6916 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6917 return expand_expr (fold (TREE_VALUE (elem)), target,
6918 tmode, ro_modifier);
6920 else if (TREE_CODE (init) == STRING_CST
6921 && 0 > compare_tree_int (index,
6922 TREE_STRING_LENGTH (init)))
6924 tree type = TREE_TYPE (TREE_TYPE (init));
6925 enum machine_mode mode = TYPE_MODE (type);
6927 if (GET_MODE_CLASS (mode) == MODE_INT
6928 && GET_MODE_SIZE (mode) == 1)
6930 (TREE_STRING_POINTER
6931 (init)[TREE_INT_CST_LOW (index)]));
6940 case ARRAY_RANGE_REF:
6941 /* If the operand is a CONSTRUCTOR, we can just extract the
6942 appropriate field if it is present. Don't do this if we have
6943 already written the data since we want to refer to that copy
6944 and varasm.c assumes that's what we'll do. */
6945 if (code == COMPONENT_REF
6946 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6947 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6951 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6952 elt = TREE_CHAIN (elt))
6953 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6954 /* We can normally use the value of the field in the
6955 CONSTRUCTOR. However, if this is a bitfield in
6956 an integral mode that we can fit in a HOST_WIDE_INT,
6957 we must mask only the number of bits in the bitfield,
6958 since this is done implicitly by the constructor. If
6959 the bitfield does not meet either of those conditions,
6960 we can't do this optimization. */
6961 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6962 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6964 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6965 <= HOST_BITS_PER_WIDE_INT))))
6967 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6968 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6970 HOST_WIDE_INT bitsize
6971 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6973 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6975 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6976 op0 = expand_and (op0, op1, target);
6980 enum machine_mode imode
6981 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6983 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6986 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6988 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6998 enum machine_mode mode1;
6999 HOST_WIDE_INT bitsize, bitpos;
7002 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7003 &mode1, &unsignedp, &volatilep);
7006 /* If we got back the original object, something is wrong. Perhaps
7007 we are evaluating an expression too early. In any event, don't
7008 infinitely recurse. */
7012 /* If TEM's type is a union of variable size, pass TARGET to the inner
7013 computation, since it will need a temporary and TARGET is known
7014 to have to do. This occurs in unchecked conversion in Ada. */
7018 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7019 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7021 ? target : NULL_RTX),
7023 (modifier == EXPAND_INITIALIZER
7024 || modifier == EXPAND_CONST_ADDRESS)
7025 ? modifier : EXPAND_NORMAL);
7027 /* If this is a constant, put it into a register if it is a
7028 legitimate constant and OFFSET is 0 and memory if it isn't. */
7029 if (CONSTANT_P (op0))
7031 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7032 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7034 op0 = force_reg (mode, op0);
7036 op0 = validize_mem (force_const_mem (mode, op0));
7041 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7043 /* If this object is in a register, put it into memory.
7044 This case can't occur in C, but can in Ada if we have
7045 unchecked conversion of an expression from a scalar type to
7046 an array or record type. */
7047 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7048 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7050 /* If the operand is a SAVE_EXPR, we can deal with this by
7051 forcing the SAVE_EXPR into memory. */
7052 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7054 put_var_into_stack (TREE_OPERAND (exp, 0));
7055 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7060 = build_qualified_type (TREE_TYPE (tem),
7061 (TYPE_QUALS (TREE_TYPE (tem))
7062 | TYPE_QUAL_CONST));
7063 rtx memloc = assign_temp (nt, 1, 1, 1);
7065 emit_move_insn (memloc, op0);
7070 if (GET_CODE (op0) != MEM)
7073 if (GET_MODE (offset_rtx) != ptr_mode)
7074 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7076 #ifdef POINTERS_EXTEND_UNSIGNED
7077 if (GET_MODE (offset_rtx) != Pmode)
7078 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7081 /* A constant address in OP0 can have VOIDmode, we must not try
7082 to call force_reg for that case. Avoid that case. */
7083 if (GET_CODE (op0) == MEM
7084 && GET_MODE (op0) == BLKmode
7085 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7087 && (bitpos % bitsize) == 0
7088 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7089 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7091 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7093 if (GET_CODE (XEXP (temp, 0)) == REG)
7096 op0 = (replace_equiv_address
7098 force_reg (GET_MODE (XEXP (temp, 0)),
7103 op0 = offset_address (op0, offset_rtx,
7104 highest_pow2_factor (offset));
7107 /* Don't forget about volatility even if this is a bitfield. */
7108 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7110 if (op0 == orig_op0)
7111 op0 = copy_rtx (op0);
7113 MEM_VOLATILE_P (op0) = 1;
7116 /* Check the access. */
7117 if (cfun != 0 && current_function_check_memory_usage
7118 && GET_CODE (op0) == MEM)
7120 enum memory_use_mode memory_usage;
7121 memory_usage = get_memory_usage_from_modifier (modifier);
7123 if (memory_usage != MEMORY_USE_DONT)
7128 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7129 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7131 /* Check the access right of the pointer. */
7132 in_check_memory_usage = 1;
7133 if (size > BITS_PER_UNIT)
7134 emit_library_call (chkr_check_addr_libfunc,
7135 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7136 Pmode, GEN_INT (size / BITS_PER_UNIT),
7137 TYPE_MODE (sizetype),
7138 GEN_INT (memory_usage),
7139 TYPE_MODE (integer_type_node));
7140 in_check_memory_usage = 0;
7144 /* In cases where an aligned union has an unaligned object
7145 as a field, we might be extracting a BLKmode value from
7146 an integer-mode (e.g., SImode) object. Handle this case
7147 by doing the extract into an object as wide as the field
7148 (which we know to be the width of a basic mode), then
7149 storing into memory, and changing the mode to BLKmode. */
7150 if (mode1 == VOIDmode
7151 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7152 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7153 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7154 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7155 && modifier != EXPAND_CONST_ADDRESS
7156 && modifier != EXPAND_INITIALIZER)
7157 /* If the field isn't aligned enough to fetch as a memref,
7158 fetch it as a bit field. */
7159 || (mode1 != BLKmode
7160 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7161 && ((TYPE_ALIGN (TREE_TYPE (tem))
7162 < GET_MODE_ALIGNMENT (mode))
7163 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7164 /* If the type and the field are a constant size and the
7165 size of the type isn't the same size as the bitfield,
7166 we must use bitfield operations. */
7168 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7170 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7173 enum machine_mode ext_mode = mode;
7175 if (ext_mode == BLKmode
7176 && ! (target != 0 && GET_CODE (op0) == MEM
7177 && GET_CODE (target) == MEM
7178 && bitpos % BITS_PER_UNIT == 0))
7179 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7181 if (ext_mode == BLKmode)
7183 /* In this case, BITPOS must start at a byte boundary and
7184 TARGET, if specified, must be a MEM. */
7185 if (GET_CODE (op0) != MEM
7186 || (target != 0 && GET_CODE (target) != MEM)
7187 || bitpos % BITS_PER_UNIT != 0)
7190 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7192 target = assign_temp (type, 0, 1, 1);
7194 emit_block_move (target, op0,
7195 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7201 op0 = validize_mem (op0);
7203 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7204 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7206 op0 = extract_bit_field (op0, bitsize, bitpos,
7207 unsignedp, target, ext_mode, ext_mode,
7208 int_size_in_bytes (TREE_TYPE (tem)));
7210 /* If the result is a record type and BITSIZE is narrower than
7211 the mode of OP0, an integral mode, and this is a big endian
7212 machine, we must put the field into the high-order bits. */
7213 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7214 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7215 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7216 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7217 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7221 if (mode == BLKmode)
7223 rtx new = assign_temp (build_qualified_type
7224 (type_for_mode (ext_mode, 0),
7225 TYPE_QUAL_CONST), 0, 1, 1);
7227 emit_move_insn (new, op0);
7228 op0 = copy_rtx (new);
7229 PUT_MODE (op0, BLKmode);
7230 set_mem_attributes (op0, exp, 1);
7236 /* If the result is BLKmode, use that to access the object
7238 if (mode == BLKmode)
7241 /* Get a reference to just this component. */
7242 if (modifier == EXPAND_CONST_ADDRESS
7243 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7244 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7246 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7248 if (op0 == orig_op0)
7249 op0 = copy_rtx (op0);
7251 set_mem_attributes (op0, exp, 0);
7252 if (GET_CODE (XEXP (op0, 0)) == REG)
7253 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7255 MEM_VOLATILE_P (op0) |= volatilep;
7256 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7257 || modifier == EXPAND_CONST_ADDRESS
7258 || modifier == EXPAND_INITIALIZER)
7260 else if (target == 0)
7261 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7263 convert_move (target, op0, unsignedp);
7269 rtx insn, before = get_last_insn (), vtbl_ref;
7271 /* Evaluate the interior expression. */
7272 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7275 /* Get or create an instruction off which to hang a note. */
7276 if (REG_P (subtarget))
7279 insn = get_last_insn ();
7282 if (! INSN_P (insn))
7283 insn = prev_nonnote_insn (insn);
7287 target = gen_reg_rtx (GET_MODE (subtarget));
7288 insn = emit_move_insn (target, subtarget);
7291 /* Collect the data for the note. */
7292 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7293 vtbl_ref = plus_constant (vtbl_ref,
7294 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7295 /* Discard the initial CONST that was added. */
7296 vtbl_ref = XEXP (vtbl_ref, 0);
7299 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7304 /* Intended for a reference to a buffer of a file-object in Pascal.
7305 But it's not certain that a special tree code will really be
7306 necessary for these. INDIRECT_REF might work for them. */
7312 /* Pascal set IN expression.
7315 rlo = set_low - (set_low%bits_per_word);
7316 the_word = set [ (index - rlo)/bits_per_word ];
7317 bit_index = index % bits_per_word;
7318 bitmask = 1 << bit_index;
7319 return !!(the_word & bitmask); */
7321 tree set = TREE_OPERAND (exp, 0);
7322 tree index = TREE_OPERAND (exp, 1);
7323 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7324 tree set_type = TREE_TYPE (set);
7325 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7326 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7327 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7328 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7329 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7330 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7331 rtx setaddr = XEXP (setval, 0);
7332 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7334 rtx diff, quo, rem, addr, bit, result;
7336 /* If domain is empty, answer is no. Likewise if index is constant
7337 and out of bounds. */
7338 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7339 && TREE_CODE (set_low_bound) == INTEGER_CST
7340 && tree_int_cst_lt (set_high_bound, set_low_bound))
7341 || (TREE_CODE (index) == INTEGER_CST
7342 && TREE_CODE (set_low_bound) == INTEGER_CST
7343 && tree_int_cst_lt (index, set_low_bound))
7344 || (TREE_CODE (set_high_bound) == INTEGER_CST
7345 && TREE_CODE (index) == INTEGER_CST
7346 && tree_int_cst_lt (set_high_bound, index))))
7350 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7352 /* If we get here, we have to generate the code for both cases
7353 (in range and out of range). */
7355 op0 = gen_label_rtx ();
7356 op1 = gen_label_rtx ();
7358 if (! (GET_CODE (index_val) == CONST_INT
7359 && GET_CODE (lo_r) == CONST_INT))
7360 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7361 GET_MODE (index_val), iunsignedp, op1);
7363 if (! (GET_CODE (index_val) == CONST_INT
7364 && GET_CODE (hi_r) == CONST_INT))
7365 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7366 GET_MODE (index_val), iunsignedp, op1);
7368 /* Calculate the element number of bit zero in the first word
7370 if (GET_CODE (lo_r) == CONST_INT)
7371 rlow = GEN_INT (INTVAL (lo_r)
7372 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7374 rlow = expand_binop (index_mode, and_optab, lo_r,
7375 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7376 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7378 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7379 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7381 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7382 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7383 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7384 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7386 addr = memory_address (byte_mode,
7387 expand_binop (index_mode, add_optab, diff,
7388 setaddr, NULL_RTX, iunsignedp,
7391 /* Extract the bit we want to examine. */
7392 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7393 gen_rtx_MEM (byte_mode, addr),
7394 make_tree (TREE_TYPE (index), rem),
7396 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7397 GET_MODE (target) == byte_mode ? target : 0,
7398 1, OPTAB_LIB_WIDEN);
7400 if (result != target)
7401 convert_move (target, result, 1);
7403 /* Output the code to handle the out-of-range case. */
7406 emit_move_insn (target, const0_rtx);
7411 case WITH_CLEANUP_EXPR:
7412 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7414 WITH_CLEANUP_EXPR_RTL (exp)
7415 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7416 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7418 /* That's it for this cleanup. */
7419 TREE_OPERAND (exp, 1) = 0;
7421 return WITH_CLEANUP_EXPR_RTL (exp);
7423 case CLEANUP_POINT_EXPR:
7425 /* Start a new binding layer that will keep track of all cleanup
7426 actions to be performed. */
7427 expand_start_bindings (2);
7429 target_temp_slot_level = temp_slot_level;
7431 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7432 /* If we're going to use this value, load it up now. */
7434 op0 = force_not_mem (op0);
7435 preserve_temp_slots (op0);
7436 expand_end_bindings (NULL_TREE, 0, 0);
7441 /* Check for a built-in function. */
7442 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7443 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7445 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7447 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7448 == BUILT_IN_FRONTEND)
7449 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7451 return expand_builtin (exp, target, subtarget, tmode, ignore);
7454 return expand_call (exp, target, ignore);
7456 case NON_LVALUE_EXPR:
7459 case REFERENCE_EXPR:
7460 if (TREE_OPERAND (exp, 0) == error_mark_node)
7463 if (TREE_CODE (type) == UNION_TYPE)
7465 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7467 /* If both input and output are BLKmode, this conversion isn't doing
7468 anything except possibly changing memory attribute. */
7469 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7471 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7474 result = copy_rtx (result);
7475 set_mem_attributes (result, exp, 0);
7480 target = assign_temp (type, 0, 1, 1);
7482 if (GET_CODE (target) == MEM)
7483 /* Store data into beginning of memory target. */
7484 store_expr (TREE_OPERAND (exp, 0),
7485 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7487 else if (GET_CODE (target) == REG)
7488 /* Store this field into a union of the proper type. */
7489 store_field (target,
7490 MIN ((int_size_in_bytes (TREE_TYPE
7491 (TREE_OPERAND (exp, 0)))
7493 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7494 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7495 VOIDmode, 0, type, 0);
7499 /* Return the entire union. */
7503 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7505 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7508 /* If the signedness of the conversion differs and OP0 is
7509 a promoted SUBREG, clear that indication since we now
7510 have to do the proper extension. */
7511 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7512 && GET_CODE (op0) == SUBREG)
7513 SUBREG_PROMOTED_VAR_P (op0) = 0;
7518 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7519 if (GET_MODE (op0) == mode)
7522 /* If OP0 is a constant, just convert it into the proper mode. */
7523 if (CONSTANT_P (op0))
7525 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7526 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7528 if (modifier == EXPAND_INITIALIZER)
7529 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7533 convert_to_mode (mode, op0,
7534 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7536 convert_move (target, op0,
7537 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7540 case VIEW_CONVERT_EXPR:
7541 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7543 /* If the input and output modes are both the same, we are done.
7544 Otherwise, if neither mode is BLKmode and both are within a word, we
7545 can use gen_lowpart. If neither is true, store the operand into
7546 memory and convert the MEM to the new mode. */
7547 if (TYPE_MODE (type) == GET_MODE (op0))
7549 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7550 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7551 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7552 op0 = gen_lowpart (TYPE_MODE (type), op0);
7555 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7556 enum machine_mode non_blkmode
7557 = GET_MODE (op0) == BLKmode ? TYPE_MODE (type) : GET_MODE (op0);
7559 if (CONSTANT_P (op0))
7560 op0 = validize_mem (force_const_mem (TYPE_MODE (inner_type), op0));
7563 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7565 = assign_stack_temp_for_type (TYPE_MODE (inner_type),
7566 GET_MODE_SIZE (non_blkmode),
7569 if (GET_MODE (target) == BLKmode)
7570 emit_block_move (target, op0,
7571 expr_size (TREE_OPERAND (exp, 0)));
7573 emit_move_insn (target, op0);
7579 if (GET_CODE (op0) == MEM)
7581 op0 = copy_rtx (op0);
7583 /* If the output type is such that the operand is known to be
7584 aligned, indicate that it is. Otherwise, we need only be
7585 concerned about alignment for non-BLKmode results. */
7586 if (TYPE_ALIGN_OK (type))
7587 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7588 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7589 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7591 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7592 HOST_WIDE_INT temp_size = MAX (int_size_in_bytes (inner_type),
7593 GET_MODE_SIZE (TYPE_MODE (type)));
7594 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7595 temp_size, 0, type);
7596 rtx new_with_op0_mode = copy_rtx (new);
7598 PUT_MODE (new_with_op0_mode, GET_MODE (op0));
7599 if (GET_MODE (op0) == BLKmode)
7600 emit_block_move (new_with_op0_mode, op0,
7601 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))));
7603 emit_move_insn (new_with_op0_mode, op0);
7608 PUT_MODE (op0, TYPE_MODE (type));
7614 /* We come here from MINUS_EXPR when the second operand is a
7617 this_optab = ! unsignedp && flag_trapv
7618 && (GET_MODE_CLASS(mode) == MODE_INT)
7619 ? addv_optab : add_optab;
7621 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7622 something else, make sure we add the register to the constant and
7623 then to the other thing. This case can occur during strength
7624 reduction and doing it this way will produce better code if the
7625 frame pointer or argument pointer is eliminated.
7627 fold-const.c will ensure that the constant is always in the inner
7628 PLUS_EXPR, so the only case we need to do anything about is if
7629 sp, ap, or fp is our second argument, in which case we must swap
7630 the innermost first argument and our second argument. */
7632 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7633 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7634 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7635 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7636 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7637 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7639 tree t = TREE_OPERAND (exp, 1);
7641 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7642 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7645 /* If the result is to be ptr_mode and we are adding an integer to
7646 something, we might be forming a constant. So try to use
7647 plus_constant. If it produces a sum and we can't accept it,
7648 use force_operand. This allows P = &ARR[const] to generate
7649 efficient code on machines where a SYMBOL_REF is not a valid
7652 If this is an EXPAND_SUM call, always return the sum. */
7653 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7654 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7656 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7657 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7658 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7662 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7664 /* Use immed_double_const to ensure that the constant is
7665 truncated according to the mode of OP1, then sign extended
7666 to a HOST_WIDE_INT. Using the constant directly can result
7667 in non-canonical RTL in a 64x32 cross compile. */
7669 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7671 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7672 op1 = plus_constant (op1, INTVAL (constant_part));
7673 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7674 op1 = force_operand (op1, target);
7678 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7679 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7680 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7684 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7686 if (! CONSTANT_P (op0))
7688 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7689 VOIDmode, modifier);
7690 /* Don't go to both_summands if modifier
7691 says it's not right to return a PLUS. */
7692 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7696 /* Use immed_double_const to ensure that the constant is
7697 truncated according to the mode of OP1, then sign extended
7698 to a HOST_WIDE_INT. Using the constant directly can result
7699 in non-canonical RTL in a 64x32 cross compile. */
7701 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7703 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7704 op0 = plus_constant (op0, INTVAL (constant_part));
7705 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7706 op0 = force_operand (op0, target);
7711 /* No sense saving up arithmetic to be done
7712 if it's all in the wrong mode to form part of an address.
7713 And force_operand won't know whether to sign-extend or
7715 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7716 || mode != ptr_mode)
7719 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7722 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7723 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7726 /* Make sure any term that's a sum with a constant comes last. */
7727 if (GET_CODE (op0) == PLUS
7728 && CONSTANT_P (XEXP (op0, 1)))
7734 /* If adding to a sum including a constant,
7735 associate it to put the constant outside. */
7736 if (GET_CODE (op1) == PLUS
7737 && CONSTANT_P (XEXP (op1, 1)))
7739 rtx constant_term = const0_rtx;
7741 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7744 /* Ensure that MULT comes first if there is one. */
7745 else if (GET_CODE (op0) == MULT)
7746 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7748 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7750 /* Let's also eliminate constants from op0 if possible. */
7751 op0 = eliminate_constant_term (op0, &constant_term);
7753 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7754 their sum should be a constant. Form it into OP1, since the
7755 result we want will then be OP0 + OP1. */
7757 temp = simplify_binary_operation (PLUS, mode, constant_term,
7762 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7765 /* Put a constant term last and put a multiplication first. */
7766 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7767 temp = op1, op1 = op0, op0 = temp;
7769 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7770 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7773 /* For initializers, we are allowed to return a MINUS of two
7774 symbolic constants. Here we handle all cases when both operands
7776 /* Handle difference of two symbolic constants,
7777 for the sake of an initializer. */
7778 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7779 && really_constant_p (TREE_OPERAND (exp, 0))
7780 && really_constant_p (TREE_OPERAND (exp, 1)))
7782 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7783 VOIDmode, ro_modifier);
7784 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7785 VOIDmode, ro_modifier);
7787 /* If the last operand is a CONST_INT, use plus_constant of
7788 the negated constant. Else make the MINUS. */
7789 if (GET_CODE (op1) == CONST_INT)
7790 return plus_constant (op0, - INTVAL (op1));
7792 return gen_rtx_MINUS (mode, op0, op1);
7794 /* Convert A - const to A + (-const). */
7795 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7797 tree negated = fold (build1 (NEGATE_EXPR, type,
7798 TREE_OPERAND (exp, 1)));
7800 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7801 /* If we can't negate the constant in TYPE, leave it alone and
7802 expand_binop will negate it for us. We used to try to do it
7803 here in the signed version of TYPE, but that doesn't work
7804 on POINTER_TYPEs. */;
7807 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7811 this_optab = ! unsignedp && flag_trapv
7812 && (GET_MODE_CLASS(mode) == MODE_INT)
7813 ? subv_optab : sub_optab;
7817 /* If first operand is constant, swap them.
7818 Thus the following special case checks need only
7819 check the second operand. */
7820 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7822 tree t1 = TREE_OPERAND (exp, 0);
7823 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7824 TREE_OPERAND (exp, 1) = t1;
7827 /* Attempt to return something suitable for generating an
7828 indexed address, for machines that support that. */
7830 if (modifier == EXPAND_SUM && mode == ptr_mode
7831 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7832 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7834 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7837 /* Apply distributive law if OP0 is x+c. */
7838 if (GET_CODE (op0) == PLUS
7839 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7844 (mode, XEXP (op0, 0),
7845 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7846 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7847 * INTVAL (XEXP (op0, 1))));
7849 if (GET_CODE (op0) != REG)
7850 op0 = force_operand (op0, NULL_RTX);
7851 if (GET_CODE (op0) != REG)
7852 op0 = copy_to_mode_reg (mode, op0);
7855 gen_rtx_MULT (mode, op0,
7856 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7859 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7862 /* Check for multiplying things that have been extended
7863 from a narrower type. If this machine supports multiplying
7864 in that narrower type with a result in the desired type,
7865 do it that way, and avoid the explicit type-conversion. */
7866 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7867 && TREE_CODE (type) == INTEGER_TYPE
7868 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7869 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7870 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7871 && int_fits_type_p (TREE_OPERAND (exp, 1),
7872 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7873 /* Don't use a widening multiply if a shift will do. */
7874 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7875 > HOST_BITS_PER_WIDE_INT)
7876 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7878 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7879 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7881 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7882 /* If both operands are extended, they must either both
7883 be zero-extended or both be sign-extended. */
7884 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7886 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7888 enum machine_mode innermode
7889 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7890 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7891 ? smul_widen_optab : umul_widen_optab);
7892 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7893 ? umul_widen_optab : smul_widen_optab);
7894 if (mode == GET_MODE_WIDER_MODE (innermode))
7896 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7898 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7899 NULL_RTX, VOIDmode, 0);
7900 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7901 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7904 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7905 NULL_RTX, VOIDmode, 0);
7908 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7909 && innermode == word_mode)
7912 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7913 NULL_RTX, VOIDmode, 0);
7914 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7915 op1 = convert_modes (innermode, mode,
7916 expand_expr (TREE_OPERAND (exp, 1),
7917 NULL_RTX, VOIDmode, 0),
7920 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7921 NULL_RTX, VOIDmode, 0);
7922 temp = expand_binop (mode, other_optab, op0, op1, target,
7923 unsignedp, OPTAB_LIB_WIDEN);
7924 htem = expand_mult_highpart_adjust (innermode,
7925 gen_highpart (innermode, temp),
7927 gen_highpart (innermode, temp),
7929 emit_move_insn (gen_highpart (innermode, temp), htem);
7934 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7935 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7936 return expand_mult (mode, op0, op1, target, unsignedp);
7938 case TRUNC_DIV_EXPR:
7939 case FLOOR_DIV_EXPR:
7941 case ROUND_DIV_EXPR:
7942 case EXACT_DIV_EXPR:
7943 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7945 /* Possible optimization: compute the dividend with EXPAND_SUM
7946 then if the divisor is constant can optimize the case
7947 where some terms of the dividend have coeffs divisible by it. */
7948 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7949 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7950 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7953 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7954 expensive divide. If not, combine will rebuild the original
7956 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7957 && !real_onep (TREE_OPERAND (exp, 0)))
7958 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7959 build (RDIV_EXPR, type,
7960 build_real (type, dconst1),
7961 TREE_OPERAND (exp, 1))),
7962 target, tmode, unsignedp);
7963 this_optab = sdiv_optab;
7966 case TRUNC_MOD_EXPR:
7967 case FLOOR_MOD_EXPR:
7969 case ROUND_MOD_EXPR:
7970 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7972 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7973 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7974 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7976 case FIX_ROUND_EXPR:
7977 case FIX_FLOOR_EXPR:
7979 abort (); /* Not used for C. */
7981 case FIX_TRUNC_EXPR:
7982 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7984 target = gen_reg_rtx (mode);
7985 expand_fix (target, op0, unsignedp);
7989 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7991 target = gen_reg_rtx (mode);
7992 /* expand_float can't figure out what to do if FROM has VOIDmode.
7993 So give it the correct mode. With -O, cse will optimize this. */
7994 if (GET_MODE (op0) == VOIDmode)
7995 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7997 expand_float (target, op0,
7998 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8002 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8003 temp = expand_unop (mode,
8004 ! unsignedp && flag_trapv
8005 && (GET_MODE_CLASS(mode) == MODE_INT)
8006 ? negv_optab : neg_optab, op0, target, 0);
8012 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8014 /* Handle complex values specially. */
8015 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8016 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8017 return expand_complex_abs (mode, op0, target, unsignedp);
8019 /* Unsigned abs is simply the operand. Testing here means we don't
8020 risk generating incorrect code below. */
8021 if (TREE_UNSIGNED (type))
8024 return expand_abs (mode, op0, target, unsignedp,
8025 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8029 target = original_target;
8030 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8031 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8032 || GET_MODE (target) != mode
8033 || (GET_CODE (target) == REG
8034 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8035 target = gen_reg_rtx (mode);
8036 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8037 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8039 /* First try to do it with a special MIN or MAX instruction.
8040 If that does not win, use a conditional jump to select the proper
8042 this_optab = (TREE_UNSIGNED (type)
8043 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8044 : (code == MIN_EXPR ? smin_optab : smax_optab));
8046 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8051 /* At this point, a MEM target is no longer useful; we will get better
8054 if (GET_CODE (target) == MEM)
8055 target = gen_reg_rtx (mode);
8058 emit_move_insn (target, op0);
8060 op0 = gen_label_rtx ();
8062 /* If this mode is an integer too wide to compare properly,
8063 compare word by word. Rely on cse to optimize constant cases. */
8064 if (GET_MODE_CLASS (mode) == MODE_INT
8065 && ! can_compare_p (GE, mode, ccp_jump))
8067 if (code == MAX_EXPR)
8068 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8069 target, op1, NULL_RTX, op0);
8071 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8072 op1, target, NULL_RTX, op0);
8076 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8077 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8078 unsignedp, mode, NULL_RTX, NULL_RTX,
8081 emit_move_insn (target, op1);
8086 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8087 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8093 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8094 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8099 /* ??? Can optimize bitwise operations with one arg constant.
8100 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8101 and (a bitwise1 b) bitwise2 b (etc)
8102 but that is probably not worth while. */
8104 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8105 boolean values when we want in all cases to compute both of them. In
8106 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8107 as actual zero-or-1 values and then bitwise anding. In cases where
8108 there cannot be any side effects, better code would be made by
8109 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8110 how to recognize those cases. */
8112 case TRUTH_AND_EXPR:
8114 this_optab = and_optab;
8119 this_optab = ior_optab;
8122 case TRUTH_XOR_EXPR:
8124 this_optab = xor_optab;
8131 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8133 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8134 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8137 /* Could determine the answer when only additive constants differ. Also,
8138 the addition of one can be handled by changing the condition. */
8145 case UNORDERED_EXPR:
8152 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8156 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8157 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8159 && GET_CODE (original_target) == REG
8160 && (GET_MODE (original_target)
8161 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8163 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8166 if (temp != original_target)
8167 temp = copy_to_reg (temp);
8169 op1 = gen_label_rtx ();
8170 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8171 GET_MODE (temp), unsignedp, op1);
8172 emit_move_insn (temp, const1_rtx);
8177 /* If no set-flag instruction, must generate a conditional
8178 store into a temporary variable. Drop through
8179 and handle this like && and ||. */
8181 case TRUTH_ANDIF_EXPR:
8182 case TRUTH_ORIF_EXPR:
8184 && (target == 0 || ! safe_from_p (target, exp, 1)
8185 /* Make sure we don't have a hard reg (such as function's return
8186 value) live across basic blocks, if not optimizing. */
8187 || (!optimize && GET_CODE (target) == REG
8188 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8189 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8192 emit_clr_insn (target);
8194 op1 = gen_label_rtx ();
8195 jumpifnot (exp, op1);
8198 emit_0_to_1_insn (target);
8201 return ignore ? const0_rtx : target;
8203 case TRUTH_NOT_EXPR:
8204 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8205 /* The parser is careful to generate TRUTH_NOT_EXPR
8206 only with operands that are always zero or one. */
8207 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8208 target, 1, OPTAB_LIB_WIDEN);
8214 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8216 return expand_expr (TREE_OPERAND (exp, 1),
8217 (ignore ? const0_rtx : target),
8221 /* If we would have a "singleton" (see below) were it not for a
8222 conversion in each arm, bring that conversion back out. */
8223 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8224 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8225 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8226 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8228 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8229 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8231 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8232 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8233 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8234 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8235 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8236 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8237 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8238 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8239 return expand_expr (build1 (NOP_EXPR, type,
8240 build (COND_EXPR, TREE_TYPE (iftrue),
8241 TREE_OPERAND (exp, 0),
8243 target, tmode, modifier);
8247 /* Note that COND_EXPRs whose type is a structure or union
8248 are required to be constructed to contain assignments of
8249 a temporary variable, so that we can evaluate them here
8250 for side effect only. If type is void, we must do likewise. */
8252 /* If an arm of the branch requires a cleanup,
8253 only that cleanup is performed. */
8256 tree binary_op = 0, unary_op = 0;
8258 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8259 convert it to our mode, if necessary. */
8260 if (integer_onep (TREE_OPERAND (exp, 1))
8261 && integer_zerop (TREE_OPERAND (exp, 2))
8262 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8266 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8271 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8272 if (GET_MODE (op0) == mode)
8276 target = gen_reg_rtx (mode);
8277 convert_move (target, op0, unsignedp);
8281 /* Check for X ? A + B : A. If we have this, we can copy A to the
8282 output and conditionally add B. Similarly for unary operations.
8283 Don't do this if X has side-effects because those side effects
8284 might affect A or B and the "?" operation is a sequence point in
8285 ANSI. (operand_equal_p tests for side effects.) */
8287 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8288 && operand_equal_p (TREE_OPERAND (exp, 2),
8289 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8290 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8291 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8292 && operand_equal_p (TREE_OPERAND (exp, 1),
8293 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8294 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8295 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8296 && operand_equal_p (TREE_OPERAND (exp, 2),
8297 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8298 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8299 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8300 && operand_equal_p (TREE_OPERAND (exp, 1),
8301 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8302 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8304 /* If we are not to produce a result, we have no target. Otherwise,
8305 if a target was specified use it; it will not be used as an
8306 intermediate target unless it is safe. If no target, use a
8311 else if (original_target
8312 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8313 || (singleton && GET_CODE (original_target) == REG
8314 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8315 && original_target == var_rtx (singleton)))
8316 && GET_MODE (original_target) == mode
8317 #ifdef HAVE_conditional_move
8318 && (! can_conditionally_move_p (mode)
8319 || GET_CODE (original_target) == REG
8320 || TREE_ADDRESSABLE (type))
8322 && (GET_CODE (original_target) != MEM
8323 || TREE_ADDRESSABLE (type)))
8324 temp = original_target;
8325 else if (TREE_ADDRESSABLE (type))
8328 temp = assign_temp (type, 0, 0, 1);
8330 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8331 do the test of X as a store-flag operation, do this as
8332 A + ((X != 0) << log C). Similarly for other simple binary
8333 operators. Only do for C == 1 if BRANCH_COST is low. */
8334 if (temp && singleton && binary_op
8335 && (TREE_CODE (binary_op) == PLUS_EXPR
8336 || TREE_CODE (binary_op) == MINUS_EXPR
8337 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8338 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8339 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8340 : integer_onep (TREE_OPERAND (binary_op, 1)))
8341 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8344 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8345 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8346 ? addv_optab : add_optab)
8347 : TREE_CODE (binary_op) == MINUS_EXPR
8348 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8349 ? subv_optab : sub_optab)
8350 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8353 /* If we had X ? A : A + 1, do this as A + (X == 0).
8355 We have to invert the truth value here and then put it
8356 back later if do_store_flag fails. We cannot simply copy
8357 TREE_OPERAND (exp, 0) to another variable and modify that
8358 because invert_truthvalue can modify the tree pointed to
8360 if (singleton == TREE_OPERAND (exp, 1))
8361 TREE_OPERAND (exp, 0)
8362 = invert_truthvalue (TREE_OPERAND (exp, 0));
8364 result = do_store_flag (TREE_OPERAND (exp, 0),
8365 (safe_from_p (temp, singleton, 1)
8367 mode, BRANCH_COST <= 1);
8369 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8370 result = expand_shift (LSHIFT_EXPR, mode, result,
8371 build_int_2 (tree_log2
8375 (safe_from_p (temp, singleton, 1)
8376 ? temp : NULL_RTX), 0);
8380 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8381 return expand_binop (mode, boptab, op1, result, temp,
8382 unsignedp, OPTAB_LIB_WIDEN);
8384 else if (singleton == TREE_OPERAND (exp, 1))
8385 TREE_OPERAND (exp, 0)
8386 = invert_truthvalue (TREE_OPERAND (exp, 0));
8389 do_pending_stack_adjust ();
8391 op0 = gen_label_rtx ();
8393 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8397 /* If the target conflicts with the other operand of the
8398 binary op, we can't use it. Also, we can't use the target
8399 if it is a hard register, because evaluating the condition
8400 might clobber it. */
8402 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8403 || (GET_CODE (temp) == REG
8404 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8405 temp = gen_reg_rtx (mode);
8406 store_expr (singleton, temp, 0);
8409 expand_expr (singleton,
8410 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8411 if (singleton == TREE_OPERAND (exp, 1))
8412 jumpif (TREE_OPERAND (exp, 0), op0);
8414 jumpifnot (TREE_OPERAND (exp, 0), op0);
8416 start_cleanup_deferral ();
8417 if (binary_op && temp == 0)
8418 /* Just touch the other operand. */
8419 expand_expr (TREE_OPERAND (binary_op, 1),
8420 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8422 store_expr (build (TREE_CODE (binary_op), type,
8423 make_tree (type, temp),
8424 TREE_OPERAND (binary_op, 1)),
8427 store_expr (build1 (TREE_CODE (unary_op), type,
8428 make_tree (type, temp)),
8432 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8433 comparison operator. If we have one of these cases, set the
8434 output to A, branch on A (cse will merge these two references),
8435 then set the output to FOO. */
8437 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8438 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8439 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8440 TREE_OPERAND (exp, 1), 0)
8441 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8442 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8443 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8445 if (GET_CODE (temp) == REG
8446 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8447 temp = gen_reg_rtx (mode);
8448 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8449 jumpif (TREE_OPERAND (exp, 0), op0);
8451 start_cleanup_deferral ();
8452 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8456 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8457 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8458 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8459 TREE_OPERAND (exp, 2), 0)
8460 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8461 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8462 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8464 if (GET_CODE (temp) == REG
8465 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8466 temp = gen_reg_rtx (mode);
8467 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8468 jumpifnot (TREE_OPERAND (exp, 0), op0);
8470 start_cleanup_deferral ();
8471 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8476 op1 = gen_label_rtx ();
8477 jumpifnot (TREE_OPERAND (exp, 0), op0);
8479 start_cleanup_deferral ();
8481 /* One branch of the cond can be void, if it never returns. For
8482 example A ? throw : E */
8484 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8485 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8487 expand_expr (TREE_OPERAND (exp, 1),
8488 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8489 end_cleanup_deferral ();
8491 emit_jump_insn (gen_jump (op1));
8494 start_cleanup_deferral ();
8496 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8497 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8499 expand_expr (TREE_OPERAND (exp, 2),
8500 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8503 end_cleanup_deferral ();
8514 /* Something needs to be initialized, but we didn't know
8515 where that thing was when building the tree. For example,
8516 it could be the return value of a function, or a parameter
8517 to a function which lays down in the stack, or a temporary
8518 variable which must be passed by reference.
8520 We guarantee that the expression will either be constructed
8521 or copied into our original target. */
8523 tree slot = TREE_OPERAND (exp, 0);
8524 tree cleanups = NULL_TREE;
8527 if (TREE_CODE (slot) != VAR_DECL)
8531 target = original_target;
8533 /* Set this here so that if we get a target that refers to a
8534 register variable that's already been used, put_reg_into_stack
8535 knows that it should fix up those uses. */
8536 TREE_USED (slot) = 1;
8540 if (DECL_RTL_SET_P (slot))
8542 target = DECL_RTL (slot);
8543 /* If we have already expanded the slot, so don't do
8545 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8550 target = assign_temp (type, 2, 0, 1);
8551 /* All temp slots at this level must not conflict. */
8552 preserve_temp_slots (target);
8553 SET_DECL_RTL (slot, target);
8554 if (TREE_ADDRESSABLE (slot))
8555 put_var_into_stack (slot);
8557 /* Since SLOT is not known to the called function
8558 to belong to its stack frame, we must build an explicit
8559 cleanup. This case occurs when we must build up a reference
8560 to pass the reference as an argument. In this case,
8561 it is very likely that such a reference need not be
8564 if (TREE_OPERAND (exp, 2) == 0)
8565 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8566 cleanups = TREE_OPERAND (exp, 2);
8571 /* This case does occur, when expanding a parameter which
8572 needs to be constructed on the stack. The target
8573 is the actual stack address that we want to initialize.
8574 The function we call will perform the cleanup in this case. */
8576 /* If we have already assigned it space, use that space,
8577 not target that we were passed in, as our target
8578 parameter is only a hint. */
8579 if (DECL_RTL_SET_P (slot))
8581 target = DECL_RTL (slot);
8582 /* If we have already expanded the slot, so don't do
8584 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8589 SET_DECL_RTL (slot, target);
8590 /* If we must have an addressable slot, then make sure that
8591 the RTL that we just stored in slot is OK. */
8592 if (TREE_ADDRESSABLE (slot))
8593 put_var_into_stack (slot);
8597 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8598 /* Mark it as expanded. */
8599 TREE_OPERAND (exp, 1) = NULL_TREE;
8601 store_expr (exp1, target, 0);
8603 expand_decl_cleanup (NULL_TREE, cleanups);
8610 tree lhs = TREE_OPERAND (exp, 0);
8611 tree rhs = TREE_OPERAND (exp, 1);
8613 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8619 /* If lhs is complex, expand calls in rhs before computing it.
8620 That's so we don't compute a pointer and save it over a
8621 call. If lhs is simple, compute it first so we can give it
8622 as a target if the rhs is just a call. This avoids an
8623 extra temp and copy and that prevents a partial-subsumption
8624 which makes bad code. Actually we could treat
8625 component_ref's of vars like vars. */
8627 tree lhs = TREE_OPERAND (exp, 0);
8628 tree rhs = TREE_OPERAND (exp, 1);
8632 /* Check for |= or &= of a bitfield of size one into another bitfield
8633 of size 1. In this case, (unless we need the result of the
8634 assignment) we can do this more efficiently with a
8635 test followed by an assignment, if necessary.
8637 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8638 things change so we do, this code should be enhanced to
8641 && TREE_CODE (lhs) == COMPONENT_REF
8642 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8643 || TREE_CODE (rhs) == BIT_AND_EXPR)
8644 && TREE_OPERAND (rhs, 0) == lhs
8645 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8646 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8647 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8649 rtx label = gen_label_rtx ();
8651 do_jump (TREE_OPERAND (rhs, 1),
8652 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8653 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8654 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8655 (TREE_CODE (rhs) == BIT_IOR_EXPR
8657 : integer_zero_node)),
8659 do_pending_stack_adjust ();
8664 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8670 if (!TREE_OPERAND (exp, 0))
8671 expand_null_return ();
8673 expand_return (TREE_OPERAND (exp, 0));
8676 case PREINCREMENT_EXPR:
8677 case PREDECREMENT_EXPR:
8678 return expand_increment (exp, 0, ignore);
8680 case POSTINCREMENT_EXPR:
8681 case POSTDECREMENT_EXPR:
8682 /* Faster to treat as pre-increment if result is not used. */
8683 return expand_increment (exp, ! ignore, ignore);
8686 /* Are we taking the address of a nested function? */
8687 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8688 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8689 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8690 && ! TREE_STATIC (exp))
8692 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8693 op0 = force_operand (op0, target);
8695 /* If we are taking the address of something erroneous, just
8697 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8699 /* If we are taking the address of a constant and are at the
8700 top level, we have to use output_constant_def since we can't
8701 call force_const_mem at top level. */
8703 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8704 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8706 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8709 /* We make sure to pass const0_rtx down if we came in with
8710 ignore set, to avoid doing the cleanups twice for something. */
8711 op0 = expand_expr (TREE_OPERAND (exp, 0),
8712 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8713 (modifier == EXPAND_INITIALIZER
8714 ? modifier : EXPAND_CONST_ADDRESS));
8716 /* If we are going to ignore the result, OP0 will have been set
8717 to const0_rtx, so just return it. Don't get confused and
8718 think we are taking the address of the constant. */
8722 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8723 clever and returns a REG when given a MEM. */
8724 op0 = protect_from_queue (op0, 1);
8726 /* We would like the object in memory. If it is a constant, we can
8727 have it be statically allocated into memory. For a non-constant,
8728 we need to allocate some memory and store the value into it. */
8730 if (CONSTANT_P (op0))
8731 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8733 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8734 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8735 || GET_CODE (op0) == PARALLEL)
8737 /* If this object is in a register, it must can't be BLKmode. */
8738 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8739 tree nt = build_qualified_type (inner_type,
8740 (TYPE_QUALS (inner_type)
8741 | TYPE_QUAL_CONST));
8742 rtx memloc = assign_temp (nt, 1, 1, 1);
8744 if (GET_CODE (op0) == PARALLEL)
8745 /* Handle calls that pass values in multiple non-contiguous
8746 locations. The Irix 6 ABI has examples of this. */
8747 emit_group_store (memloc, op0, int_size_in_bytes (inner_type));
8749 emit_move_insn (memloc, op0);
8754 if (GET_CODE (op0) != MEM)
8757 mark_temp_addr_taken (op0);
8758 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8760 op0 = XEXP (op0, 0);
8761 #ifdef POINTERS_EXTEND_UNSIGNED
8762 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8763 && mode == ptr_mode)
8764 op0 = convert_memory_address (ptr_mode, op0);
8769 /* If OP0 is not aligned as least as much as the type requires, we
8770 need to make a temporary, copy OP0 to it, and take the address of
8771 the temporary. We want to use the alignment of the type, not of
8772 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8773 the test for BLKmode means that can't happen. The test for
8774 BLKmode is because we never make mis-aligned MEMs with
8777 We don't need to do this at all if the machine doesn't have
8778 strict alignment. */
8779 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8780 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8782 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8784 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8786 = assign_stack_temp_for_type
8787 (TYPE_MODE (inner_type),
8788 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8789 : int_size_in_bytes (inner_type),
8790 1, build_qualified_type (inner_type,
8791 (TYPE_QUALS (inner_type)
8792 | TYPE_QUAL_CONST)));
8794 if (TYPE_ALIGN_OK (inner_type))
8797 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)));
8801 op0 = force_operand (XEXP (op0, 0), target);
8804 if (flag_force_addr && GET_CODE (op0) != REG)
8805 op0 = force_reg (Pmode, op0);
8807 if (GET_CODE (op0) == REG
8808 && ! REG_USERVAR_P (op0))
8809 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8811 #ifdef POINTERS_EXTEND_UNSIGNED
8812 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8813 && mode == ptr_mode)
8814 op0 = convert_memory_address (ptr_mode, op0);
8819 case ENTRY_VALUE_EXPR:
8822 /* COMPLEX type for Extended Pascal & Fortran */
8825 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8828 /* Get the rtx code of the operands. */
8829 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8830 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8833 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8837 /* Move the real (op0) and imaginary (op1) parts to their location. */
8838 emit_move_insn (gen_realpart (mode, target), op0);
8839 emit_move_insn (gen_imagpart (mode, target), op1);
8841 insns = get_insns ();
8844 /* Complex construction should appear as a single unit. */
8845 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8846 each with a separate pseudo as destination.
8847 It's not correct for flow to treat them as a unit. */
8848 if (GET_CODE (target) != CONCAT)
8849 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8857 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8858 return gen_realpart (mode, op0);
8861 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8862 return gen_imagpart (mode, op0);
8866 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8870 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8873 target = gen_reg_rtx (mode);
8877 /* Store the realpart and the negated imagpart to target. */
8878 emit_move_insn (gen_realpart (partmode, target),
8879 gen_realpart (partmode, op0));
8881 imag_t = gen_imagpart (partmode, target);
8882 temp = expand_unop (partmode,
8883 ! unsignedp && flag_trapv
8884 && (GET_MODE_CLASS(partmode) == MODE_INT)
8885 ? negv_optab : neg_optab,
8886 gen_imagpart (partmode, op0), imag_t, 0);
8888 emit_move_insn (imag_t, temp);
8890 insns = get_insns ();
8893 /* Conjugate should appear as a single unit
8894 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8895 each with a separate pseudo as destination.
8896 It's not correct for flow to treat them as a unit. */
8897 if (GET_CODE (target) != CONCAT)
8898 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8905 case TRY_CATCH_EXPR:
8907 tree handler = TREE_OPERAND (exp, 1);
8909 expand_eh_region_start ();
8911 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8913 expand_eh_region_end_cleanup (handler);
8918 case TRY_FINALLY_EXPR:
8920 tree try_block = TREE_OPERAND (exp, 0);
8921 tree finally_block = TREE_OPERAND (exp, 1);
8922 rtx finally_label = gen_label_rtx ();
8923 rtx done_label = gen_label_rtx ();
8924 rtx return_link = gen_reg_rtx (Pmode);
8925 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8926 (tree) finally_label, (tree) return_link);
8927 TREE_SIDE_EFFECTS (cleanup) = 1;
8929 /* Start a new binding layer that will keep track of all cleanup
8930 actions to be performed. */
8931 expand_start_bindings (2);
8933 target_temp_slot_level = temp_slot_level;
8935 expand_decl_cleanup (NULL_TREE, cleanup);
8936 op0 = expand_expr (try_block, target, tmode, modifier);
8938 preserve_temp_slots (op0);
8939 expand_end_bindings (NULL_TREE, 0, 0);
8940 emit_jump (done_label);
8941 emit_label (finally_label);
8942 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8943 emit_indirect_jump (return_link);
8944 emit_label (done_label);
8948 case GOTO_SUBROUTINE_EXPR:
8950 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8951 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8952 rtx return_address = gen_label_rtx ();
8953 emit_move_insn (return_link,
8954 gen_rtx_LABEL_REF (Pmode, return_address));
8956 emit_label (return_address);
8961 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8964 return get_exception_pointer (cfun);
8967 /* Function descriptors are not valid except for as
8968 initialization constants, and should not be expanded. */
8972 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8975 /* Here to do an ordinary binary operator, generating an instruction
8976 from the optab already placed in `this_optab'. */
8978 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8980 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8981 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8983 temp = expand_binop (mode, this_optab, op0, op1, target,
8984 unsignedp, OPTAB_LIB_WIDEN);
8990 /* Return the tree node if a ARG corresponds to a string constant or zero
8991 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8992 in bytes within the string that ARG is accessing. The type of the
8993 offset will be `sizetype'. */
8996 string_constant (arg, ptr_offset)
9002 if (TREE_CODE (arg) == ADDR_EXPR
9003 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9005 *ptr_offset = size_zero_node;
9006 return TREE_OPERAND (arg, 0);
9008 else if (TREE_CODE (arg) == PLUS_EXPR)
9010 tree arg0 = TREE_OPERAND (arg, 0);
9011 tree arg1 = TREE_OPERAND (arg, 1);
9016 if (TREE_CODE (arg0) == ADDR_EXPR
9017 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9019 *ptr_offset = convert (sizetype, arg1);
9020 return TREE_OPERAND (arg0, 0);
9022 else if (TREE_CODE (arg1) == ADDR_EXPR
9023 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9025 *ptr_offset = convert (sizetype, arg0);
9026 return TREE_OPERAND (arg1, 0);
9033 /* Expand code for a post- or pre- increment or decrement
9034 and return the RTX for the result.
9035 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9038 expand_increment (exp, post, ignore)
9044 tree incremented = TREE_OPERAND (exp, 0);
9045 optab this_optab = add_optab;
9047 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9048 int op0_is_copy = 0;
9049 int single_insn = 0;
9050 /* 1 means we can't store into OP0 directly,
9051 because it is a subreg narrower than a word,
9052 and we don't dare clobber the rest of the word. */
9055 /* Stabilize any component ref that might need to be
9056 evaluated more than once below. */
9058 || TREE_CODE (incremented) == BIT_FIELD_REF
9059 || (TREE_CODE (incremented) == COMPONENT_REF
9060 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9061 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9062 incremented = stabilize_reference (incremented);
9063 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9064 ones into save exprs so that they don't accidentally get evaluated
9065 more than once by the code below. */
9066 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9067 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9068 incremented = save_expr (incremented);
9070 /* Compute the operands as RTX.
9071 Note whether OP0 is the actual lvalue or a copy of it:
9072 I believe it is a copy iff it is a register or subreg
9073 and insns were generated in computing it. */
9075 temp = get_last_insn ();
9076 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9078 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9079 in place but instead must do sign- or zero-extension during assignment,
9080 so we copy it into a new register and let the code below use it as
9083 Note that we can safely modify this SUBREG since it is know not to be
9084 shared (it was made by the expand_expr call above). */
9086 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9089 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9093 else if (GET_CODE (op0) == SUBREG
9094 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9096 /* We cannot increment this SUBREG in place. If we are
9097 post-incrementing, get a copy of the old value. Otherwise,
9098 just mark that we cannot increment in place. */
9100 op0 = copy_to_reg (op0);
9105 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9106 && temp != get_last_insn ());
9107 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9108 EXPAND_MEMORY_USE_BAD);
9110 /* Decide whether incrementing or decrementing. */
9111 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9112 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9113 this_optab = sub_optab;
9115 /* Convert decrement by a constant into a negative increment. */
9116 if (this_optab == sub_optab
9117 && GET_CODE (op1) == CONST_INT)
9119 op1 = GEN_INT (-INTVAL (op1));
9120 this_optab = add_optab;
9123 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9124 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9126 /* For a preincrement, see if we can do this with a single instruction. */
9129 icode = (int) this_optab->handlers[(int) mode].insn_code;
9130 if (icode != (int) CODE_FOR_nothing
9131 /* Make sure that OP0 is valid for operands 0 and 1
9132 of the insn we want to queue. */
9133 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9134 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9135 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9139 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9140 then we cannot just increment OP0. We must therefore contrive to
9141 increment the original value. Then, for postincrement, we can return
9142 OP0 since it is a copy of the old value. For preincrement, expand here
9143 unless we can do it with a single insn.
9145 Likewise if storing directly into OP0 would clobber high bits
9146 we need to preserve (bad_subreg). */
9147 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9149 /* This is the easiest way to increment the value wherever it is.
9150 Problems with multiple evaluation of INCREMENTED are prevented
9151 because either (1) it is a component_ref or preincrement,
9152 in which case it was stabilized above, or (2) it is an array_ref
9153 with constant index in an array in a register, which is
9154 safe to reevaluate. */
9155 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9156 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9157 ? MINUS_EXPR : PLUS_EXPR),
9160 TREE_OPERAND (exp, 1));
9162 while (TREE_CODE (incremented) == NOP_EXPR
9163 || TREE_CODE (incremented) == CONVERT_EXPR)
9165 newexp = convert (TREE_TYPE (incremented), newexp);
9166 incremented = TREE_OPERAND (incremented, 0);
9169 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9170 return post ? op0 : temp;
9175 /* We have a true reference to the value in OP0.
9176 If there is an insn to add or subtract in this mode, queue it.
9177 Queueing the increment insn avoids the register shuffling
9178 that often results if we must increment now and first save
9179 the old value for subsequent use. */
9181 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9182 op0 = stabilize (op0);
9185 icode = (int) this_optab->handlers[(int) mode].insn_code;
9186 if (icode != (int) CODE_FOR_nothing
9187 /* Make sure that OP0 is valid for operands 0 and 1
9188 of the insn we want to queue. */
9189 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9190 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9192 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9193 op1 = force_reg (mode, op1);
9195 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9197 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9199 rtx addr = (general_operand (XEXP (op0, 0), mode)
9200 ? force_reg (Pmode, XEXP (op0, 0))
9201 : copy_to_reg (XEXP (op0, 0)));
9204 op0 = replace_equiv_address (op0, addr);
9205 temp = force_reg (GET_MODE (op0), op0);
9206 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9207 op1 = force_reg (mode, op1);
9209 /* The increment queue is LIFO, thus we have to `queue'
9210 the instructions in reverse order. */
9211 enqueue_insn (op0, gen_move_insn (op0, temp));
9212 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9217 /* Preincrement, or we can't increment with one simple insn. */
9219 /* Save a copy of the value before inc or dec, to return it later. */
9220 temp = value = copy_to_reg (op0);
9222 /* Arrange to return the incremented value. */
9223 /* Copy the rtx because expand_binop will protect from the queue,
9224 and the results of that would be invalid for us to return
9225 if our caller does emit_queue before using our result. */
9226 temp = copy_rtx (value = op0);
9228 /* Increment however we can. */
9229 op1 = expand_binop (mode, this_optab, value, op1,
9230 current_function_check_memory_usage ? NULL_RTX : op0,
9231 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9232 /* Make sure the value is stored into OP0. */
9234 emit_move_insn (op0, op1);
9239 /* At the start of a function, record that we have no previously-pushed
9240 arguments waiting to be popped. */
9243 init_pending_stack_adjust ()
9245 pending_stack_adjust = 0;
9248 /* When exiting from function, if safe, clear out any pending stack adjust
9249 so the adjustment won't get done.
9251 Note, if the current function calls alloca, then it must have a
9252 frame pointer regardless of the value of flag_omit_frame_pointer. */
9255 clear_pending_stack_adjust ()
9257 #ifdef EXIT_IGNORE_STACK
9259 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9260 && EXIT_IGNORE_STACK
9261 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9262 && ! flag_inline_functions)
9264 stack_pointer_delta -= pending_stack_adjust,
9265 pending_stack_adjust = 0;
9270 /* Pop any previously-pushed arguments that have not been popped yet. */
9273 do_pending_stack_adjust ()
9275 if (inhibit_defer_pop == 0)
9277 if (pending_stack_adjust != 0)
9278 adjust_stack (GEN_INT (pending_stack_adjust));
9279 pending_stack_adjust = 0;
9283 /* Expand conditional expressions. */
9285 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9286 LABEL is an rtx of code CODE_LABEL, in this function and all the
9290 jumpifnot (exp, label)
9294 do_jump (exp, label, NULL_RTX);
9297 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9304 do_jump (exp, NULL_RTX, label);
9307 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9308 the result is zero, or IF_TRUE_LABEL if the result is one.
9309 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9310 meaning fall through in that case.
9312 do_jump always does any pending stack adjust except when it does not
9313 actually perform a jump. An example where there is no jump
9314 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9316 This function is responsible for optimizing cases such as
9317 &&, || and comparison operators in EXP. */
9320 do_jump (exp, if_false_label, if_true_label)
9322 rtx if_false_label, if_true_label;
9324 enum tree_code code = TREE_CODE (exp);
9325 /* Some cases need to create a label to jump to
9326 in order to properly fall through.
9327 These cases set DROP_THROUGH_LABEL nonzero. */
9328 rtx drop_through_label = 0;
9332 enum machine_mode mode;
9334 #ifdef MAX_INTEGER_COMPUTATION_MODE
9335 check_max_integer_computation_mode (exp);
9346 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9352 /* This is not true with #pragma weak */
9354 /* The address of something can never be zero. */
9356 emit_jump (if_true_label);
9361 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9362 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9363 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9364 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9367 /* If we are narrowing the operand, we have to do the compare in the
9369 if ((TYPE_PRECISION (TREE_TYPE (exp))
9370 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9372 case NON_LVALUE_EXPR:
9373 case REFERENCE_EXPR:
9378 /* These cannot change zero->non-zero or vice versa. */
9379 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9382 case WITH_RECORD_EXPR:
9383 /* Put the object on the placeholder list, recurse through our first
9384 operand, and pop the list. */
9385 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9387 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9388 placeholder_list = TREE_CHAIN (placeholder_list);
9392 /* This is never less insns than evaluating the PLUS_EXPR followed by
9393 a test and can be longer if the test is eliminated. */
9395 /* Reduce to minus. */
9396 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9397 TREE_OPERAND (exp, 0),
9398 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9399 TREE_OPERAND (exp, 1))));
9400 /* Process as MINUS. */
9404 /* Non-zero iff operands of minus differ. */
9405 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9406 TREE_OPERAND (exp, 0),
9407 TREE_OPERAND (exp, 1)),
9408 NE, NE, if_false_label, if_true_label);
9412 /* If we are AND'ing with a small constant, do this comparison in the
9413 smallest type that fits. If the machine doesn't have comparisons
9414 that small, it will be converted back to the wider comparison.
9415 This helps if we are testing the sign bit of a narrower object.
9416 combine can't do this for us because it can't know whether a
9417 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9419 if (! SLOW_BYTE_ACCESS
9420 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9421 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9422 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9423 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9424 && (type = type_for_mode (mode, 1)) != 0
9425 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9426 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9427 != CODE_FOR_nothing))
9429 do_jump (convert (type, exp), if_false_label, if_true_label);
9434 case TRUTH_NOT_EXPR:
9435 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9438 case TRUTH_ANDIF_EXPR:
9439 if (if_false_label == 0)
9440 if_false_label = drop_through_label = gen_label_rtx ();
9441 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9442 start_cleanup_deferral ();
9443 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9444 end_cleanup_deferral ();
9447 case TRUTH_ORIF_EXPR:
9448 if (if_true_label == 0)
9449 if_true_label = drop_through_label = gen_label_rtx ();
9450 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9451 start_cleanup_deferral ();
9452 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9453 end_cleanup_deferral ();
9458 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9459 preserve_temp_slots (NULL_RTX);
9463 do_pending_stack_adjust ();
9464 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9470 case ARRAY_RANGE_REF:
9472 HOST_WIDE_INT bitsize, bitpos;
9474 enum machine_mode mode;
9479 /* Get description of this reference. We don't actually care
9480 about the underlying object here. */
9481 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9482 &unsignedp, &volatilep);
9484 type = type_for_size (bitsize, unsignedp);
9485 if (! SLOW_BYTE_ACCESS
9486 && type != 0 && bitsize >= 0
9487 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9488 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9489 != CODE_FOR_nothing))
9491 do_jump (convert (type, exp), if_false_label, if_true_label);
9498 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9499 if (integer_onep (TREE_OPERAND (exp, 1))
9500 && integer_zerop (TREE_OPERAND (exp, 2)))
9501 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9503 else if (integer_zerop (TREE_OPERAND (exp, 1))
9504 && integer_onep (TREE_OPERAND (exp, 2)))
9505 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9509 rtx label1 = gen_label_rtx ();
9510 drop_through_label = gen_label_rtx ();
9512 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9514 start_cleanup_deferral ();
9515 /* Now the THEN-expression. */
9516 do_jump (TREE_OPERAND (exp, 1),
9517 if_false_label ? if_false_label : drop_through_label,
9518 if_true_label ? if_true_label : drop_through_label);
9519 /* In case the do_jump just above never jumps. */
9520 do_pending_stack_adjust ();
9521 emit_label (label1);
9523 /* Now the ELSE-expression. */
9524 do_jump (TREE_OPERAND (exp, 2),
9525 if_false_label ? if_false_label : drop_through_label,
9526 if_true_label ? if_true_label : drop_through_label);
9527 end_cleanup_deferral ();
9533 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9535 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9536 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9538 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9539 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9542 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9543 fold (build (EQ_EXPR, TREE_TYPE (exp),
9544 fold (build1 (REALPART_EXPR,
9545 TREE_TYPE (inner_type),
9547 fold (build1 (REALPART_EXPR,
9548 TREE_TYPE (inner_type),
9550 fold (build (EQ_EXPR, TREE_TYPE (exp),
9551 fold (build1 (IMAGPART_EXPR,
9552 TREE_TYPE (inner_type),
9554 fold (build1 (IMAGPART_EXPR,
9555 TREE_TYPE (inner_type),
9557 if_false_label, if_true_label);
9560 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9561 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9563 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9564 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9565 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9567 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9573 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9575 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9576 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9578 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9579 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9582 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9583 fold (build (NE_EXPR, TREE_TYPE (exp),
9584 fold (build1 (REALPART_EXPR,
9585 TREE_TYPE (inner_type),
9587 fold (build1 (REALPART_EXPR,
9588 TREE_TYPE (inner_type),
9590 fold (build (NE_EXPR, TREE_TYPE (exp),
9591 fold (build1 (IMAGPART_EXPR,
9592 TREE_TYPE (inner_type),
9594 fold (build1 (IMAGPART_EXPR,
9595 TREE_TYPE (inner_type),
9597 if_false_label, if_true_label);
9600 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9601 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9603 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9604 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9605 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9607 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9612 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9613 if (GET_MODE_CLASS (mode) == MODE_INT
9614 && ! can_compare_p (LT, mode, ccp_jump))
9615 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9617 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9621 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9622 if (GET_MODE_CLASS (mode) == MODE_INT
9623 && ! can_compare_p (LE, mode, ccp_jump))
9624 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9626 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9630 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9631 if (GET_MODE_CLASS (mode) == MODE_INT
9632 && ! can_compare_p (GT, mode, ccp_jump))
9633 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9635 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9639 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9640 if (GET_MODE_CLASS (mode) == MODE_INT
9641 && ! can_compare_p (GE, mode, ccp_jump))
9642 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9644 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9647 case UNORDERED_EXPR:
9650 enum rtx_code cmp, rcmp;
9653 if (code == UNORDERED_EXPR)
9654 cmp = UNORDERED, rcmp = ORDERED;
9656 cmp = ORDERED, rcmp = UNORDERED;
9657 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9660 if (! can_compare_p (cmp, mode, ccp_jump)
9661 && (can_compare_p (rcmp, mode, ccp_jump)
9662 /* If the target doesn't provide either UNORDERED or ORDERED
9663 comparisons, canonicalize on UNORDERED for the library. */
9664 || rcmp == UNORDERED))
9668 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9670 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9675 enum rtx_code rcode1;
9676 enum tree_code tcode2;
9700 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9701 if (can_compare_p (rcode1, mode, ccp_jump))
9702 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9706 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9707 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9710 /* If the target doesn't support combined unordered
9711 compares, decompose into UNORDERED + comparison. */
9712 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9713 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9714 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9715 do_jump (exp, if_false_label, if_true_label);
9721 __builtin_expect (<test>, 0) and
9722 __builtin_expect (<test>, 1)
9724 We need to do this here, so that <test> is not converted to a SCC
9725 operation on machines that use condition code registers and COMPARE
9726 like the PowerPC, and then the jump is done based on whether the SCC
9727 operation produced a 1 or 0. */
9729 /* Check for a built-in function. */
9730 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9732 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9733 tree arglist = TREE_OPERAND (exp, 1);
9735 if (TREE_CODE (fndecl) == FUNCTION_DECL
9736 && DECL_BUILT_IN (fndecl)
9737 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9738 && arglist != NULL_TREE
9739 && TREE_CHAIN (arglist) != NULL_TREE)
9741 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9744 if (seq != NULL_RTX)
9751 /* fall through and generate the normal code. */
9755 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9757 /* This is not needed any more and causes poor code since it causes
9758 comparisons and tests from non-SI objects to have different code
9760 /* Copy to register to avoid generating bad insns by cse
9761 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9762 if (!cse_not_expected && GET_CODE (temp) == MEM)
9763 temp = copy_to_reg (temp);
9765 do_pending_stack_adjust ();
9766 /* Do any postincrements in the expression that was tested. */
9769 if (GET_CODE (temp) == CONST_INT
9770 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9771 || GET_CODE (temp) == LABEL_REF)
9773 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9777 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9778 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9779 /* Note swapping the labels gives us not-equal. */
9780 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9781 else if (GET_MODE (temp) != VOIDmode)
9782 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9783 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9784 GET_MODE (temp), NULL_RTX,
9785 if_false_label, if_true_label);
9790 if (drop_through_label)
9792 /* If do_jump produces code that might be jumped around,
9793 do any stack adjusts from that code, before the place
9794 where control merges in. */
9795 do_pending_stack_adjust ();
9796 emit_label (drop_through_label);
9800 /* Given a comparison expression EXP for values too wide to be compared
9801 with one insn, test the comparison and jump to the appropriate label.
9802 The code of EXP is ignored; we always test GT if SWAP is 0,
9803 and LT if SWAP is 1. */
9806 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9809 rtx if_false_label, if_true_label;
9811 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9812 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9813 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9814 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9816 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9819 /* Compare OP0 with OP1, word at a time, in mode MODE.
9820 UNSIGNEDP says to do unsigned comparison.
9821 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
9824 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9825 enum machine_mode mode;
9828 rtx if_false_label, if_true_label;
9830 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9831 rtx drop_through_label = 0;
9834 if (! if_true_label || ! if_false_label)
9835 drop_through_label = gen_label_rtx ();
9836 if (! if_true_label)
9837 if_true_label = drop_through_label;
9838 if (! if_false_label)
9839 if_false_label = drop_through_label;
9841 /* Compare a word at a time, high order first. */
9842 for (i = 0; i < nwords; i++)
9844 rtx op0_word, op1_word;
9846 if (WORDS_BIG_ENDIAN)
9848 op0_word = operand_subword_force (op0, i, mode);
9849 op1_word = operand_subword_force (op1, i, mode);
9853 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9854 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9857 /* All but high-order word must be compared as unsigned. */
9858 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9859 (unsignedp || i > 0), word_mode, NULL_RTX,
9860 NULL_RTX, if_true_label);
9862 /* Consider lower words only if these are equal. */
9863 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9864 NULL_RTX, NULL_RTX, if_false_label);
9868 emit_jump (if_false_label);
9869 if (drop_through_label)
9870 emit_label (drop_through_label);
9873 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9874 with one insn, test the comparison and jump to the appropriate label. */
9877 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9879 rtx if_false_label, if_true_label;
9881 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9882 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9883 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9884 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9886 rtx drop_through_label = 0;
9888 if (! if_false_label)
9889 drop_through_label = if_false_label = gen_label_rtx ();
9891 for (i = 0; i < nwords; i++)
9892 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9893 operand_subword_force (op1, i, mode),
9894 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9895 word_mode, NULL_RTX, if_false_label, NULL_RTX);
9898 emit_jump (if_true_label);
9899 if (drop_through_label)
9900 emit_label (drop_through_label);
9903 /* Jump according to whether OP0 is 0.
9904 We assume that OP0 has an integer mode that is too wide
9905 for the available compare insns. */
9908 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9910 rtx if_false_label, if_true_label;
9912 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9915 rtx drop_through_label = 0;
9917 /* The fastest way of doing this comparison on almost any machine is to
9918 "or" all the words and compare the result. If all have to be loaded
9919 from memory and this is a very wide item, it's possible this may
9920 be slower, but that's highly unlikely. */
9922 part = gen_reg_rtx (word_mode);
9923 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9924 for (i = 1; i < nwords && part != 0; i++)
9925 part = expand_binop (word_mode, ior_optab, part,
9926 operand_subword_force (op0, i, GET_MODE (op0)),
9927 part, 1, OPTAB_WIDEN);
9931 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9932 NULL_RTX, if_false_label, if_true_label);
9937 /* If we couldn't do the "or" simply, do this with a series of compares. */
9938 if (! if_false_label)
9939 drop_through_label = if_false_label = gen_label_rtx ();
9941 for (i = 0; i < nwords; i++)
9942 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9943 const0_rtx, EQ, 1, word_mode, NULL_RTX,
9944 if_false_label, NULL_RTX);
9947 emit_jump (if_true_label);
9949 if (drop_through_label)
9950 emit_label (drop_through_label);
9953 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
9954 (including code to compute the values to be compared)
9955 and set (CC0) according to the result.
9956 The decision as to signed or unsigned comparison must be made by the caller.
9958 We force a stack adjustment unless there are currently
9959 things pushed on the stack that aren't yet used.
9961 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9965 compare_from_rtx (op0, op1, code, unsignedp, mode, size)
9969 enum machine_mode mode;
9974 /* If one operand is constant, make it the second one. Only do this
9975 if the other operand is not constant as well. */
9977 if (swap_commutative_operands_p (op0, op1))
9982 code = swap_condition (code);
9987 op0 = force_not_mem (op0);
9988 op1 = force_not_mem (op1);
9991 do_pending_stack_adjust ();
9993 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9994 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9998 /* There's no need to do this now that combine.c can eliminate lots of
9999 sign extensions. This can be less efficient in certain cases on other
10002 /* If this is a signed equality comparison, we can do it as an
10003 unsigned comparison since zero-extension is cheaper than sign
10004 extension and comparisons with zero are done as unsigned. This is
10005 the case even on machines that can do fast sign extension, since
10006 zero-extension is easier to combine with other operations than
10007 sign-extension is. If we are comparing against a constant, we must
10008 convert it to what it would look like unsigned. */
10009 if ((code == EQ || code == NE) && ! unsignedp
10010 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10012 if (GET_CODE (op1) == CONST_INT
10013 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10014 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10019 emit_cmp_insn (op0, op1, code, size, mode, unsignedp);
10021 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
10024 /* Like do_compare_and_jump but expects the values to compare as two rtx's.
10025 The decision as to signed or unsigned comparison must be made by the caller.
10027 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10031 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
10032 if_false_label, if_true_label)
10034 enum rtx_code code;
10036 enum machine_mode mode;
10038 rtx if_false_label, if_true_label;
10041 int dummy_true_label = 0;
10043 /* Reverse the comparison if that is safe and we want to jump if it is
10045 if (! if_true_label && ! FLOAT_MODE_P (mode))
10047 if_true_label = if_false_label;
10048 if_false_label = 0;
10049 code = reverse_condition (code);
10052 /* If one operand is constant, make it the second one. Only do this
10053 if the other operand is not constant as well. */
10055 if (swap_commutative_operands_p (op0, op1))
10060 code = swap_condition (code);
10063 if (flag_force_mem)
10065 op0 = force_not_mem (op0);
10066 op1 = force_not_mem (op1);
10069 do_pending_stack_adjust ();
10071 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10072 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10074 if (tem == const_true_rtx)
10077 emit_jump (if_true_label);
10081 if (if_false_label)
10082 emit_jump (if_false_label);
10088 /* There's no need to do this now that combine.c can eliminate lots of
10089 sign extensions. This can be less efficient in certain cases on other
10092 /* If this is a signed equality comparison, we can do it as an
10093 unsigned comparison since zero-extension is cheaper than sign
10094 extension and comparisons with zero are done as unsigned. This is
10095 the case even on machines that can do fast sign extension, since
10096 zero-extension is easier to combine with other operations than
10097 sign-extension is. If we are comparing against a constant, we must
10098 convert it to what it would look like unsigned. */
10099 if ((code == EQ || code == NE) && ! unsignedp
10100 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10102 if (GET_CODE (op1) == CONST_INT
10103 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10104 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10109 if (! if_true_label)
10111 dummy_true_label = 1;
10112 if_true_label = gen_label_rtx ();
10115 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
10118 if (if_false_label)
10119 emit_jump (if_false_label);
10120 if (dummy_true_label)
10121 emit_label (if_true_label);
10124 /* Generate code for a comparison expression EXP (including code to compute
10125 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
10126 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
10127 generated code will drop through.
10128 SIGNED_CODE should be the rtx operation for this comparison for
10129 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
10131 We force a stack adjustment unless there are currently
10132 things pushed on the stack that aren't yet used. */
10135 do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
10138 enum rtx_code signed_code, unsigned_code;
10139 rtx if_false_label, if_true_label;
10143 enum machine_mode mode;
10145 enum rtx_code code;
10147 /* Don't crash if the comparison was erroneous. */
10148 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10149 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
10152 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10153 if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
10156 type = TREE_TYPE (TREE_OPERAND (exp, 0));
10157 mode = TYPE_MODE (type);
10158 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
10159 && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
10160 || (GET_MODE_BITSIZE (mode)
10161 > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
10164 /* op0 might have been replaced by promoted constant, in which
10165 case the type of second argument should be used. */
10166 type = TREE_TYPE (TREE_OPERAND (exp, 1));
10167 mode = TYPE_MODE (type);
10169 unsignedp = TREE_UNSIGNED (type);
10170 code = unsignedp ? unsigned_code : signed_code;
10172 #ifdef HAVE_canonicalize_funcptr_for_compare
10173 /* If function pointers need to be "canonicalized" before they can
10174 be reliably compared, then canonicalize them. */
10175 if (HAVE_canonicalize_funcptr_for_compare
10176 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10177 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10180 rtx new_op0 = gen_reg_rtx (mode);
10182 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10186 if (HAVE_canonicalize_funcptr_for_compare
10187 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10188 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10191 rtx new_op1 = gen_reg_rtx (mode);
10193 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10198 /* Do any postincrements in the expression that was tested. */
10201 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
10203 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10204 if_false_label, if_true_label);
10207 /* Generate code to calculate EXP using a store-flag instruction
10208 and return an rtx for the result. EXP is either a comparison
10209 or a TRUTH_NOT_EXPR whose operand is a comparison.
10211 If TARGET is nonzero, store the result there if convenient.
10213 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10216 Return zero if there is no suitable set-flag instruction
10217 available on this machine.
10219 Once expand_expr has been called on the arguments of the comparison,
10220 we are committed to doing the store flag, since it is not safe to
10221 re-evaluate the expression. We emit the store-flag insn by calling
10222 emit_store_flag, but only expand the arguments if we have a reason
10223 to believe that emit_store_flag will be successful. If we think that
10224 it will, but it isn't, we have to simulate the store-flag with a
10225 set/jump/set sequence. */
10228 do_store_flag (exp, target, mode, only_cheap)
10231 enum machine_mode mode;
10234 enum rtx_code code;
10235 tree arg0, arg1, type;
10237 enum machine_mode operand_mode;
10241 enum insn_code icode;
10242 rtx subtarget = target;
10245 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10246 result at the end. We can't simply invert the test since it would
10247 have already been inverted if it were valid. This case occurs for
10248 some floating-point comparisons. */
10250 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10251 invert = 1, exp = TREE_OPERAND (exp, 0);
10253 arg0 = TREE_OPERAND (exp, 0);
10254 arg1 = TREE_OPERAND (exp, 1);
10256 /* Don't crash if the comparison was erroneous. */
10257 if (arg0 == error_mark_node || arg1 == error_mark_node)
10260 type = TREE_TYPE (arg0);
10261 operand_mode = TYPE_MODE (type);
10262 unsignedp = TREE_UNSIGNED (type);
10264 /* We won't bother with BLKmode store-flag operations because it would mean
10265 passing a lot of information to emit_store_flag. */
10266 if (operand_mode == BLKmode)
10269 /* We won't bother with store-flag operations involving function pointers
10270 when function pointers must be canonicalized before comparisons. */
10271 #ifdef HAVE_canonicalize_funcptr_for_compare
10272 if (HAVE_canonicalize_funcptr_for_compare
10273 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10274 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10276 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10277 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10278 == FUNCTION_TYPE))))
10285 /* Get the rtx comparison code to use. We know that EXP is a comparison
10286 operation of some type. Some comparisons against 1 and -1 can be
10287 converted to comparisons with zero. Do so here so that the tests
10288 below will be aware that we have a comparison with zero. These
10289 tests will not catch constants in the first operand, but constants
10290 are rarely passed as the first operand. */
10292 switch (TREE_CODE (exp))
10301 if (integer_onep (arg1))
10302 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10304 code = unsignedp ? LTU : LT;
10307 if (! unsignedp && integer_all_onesp (arg1))
10308 arg1 = integer_zero_node, code = LT;
10310 code = unsignedp ? LEU : LE;
10313 if (! unsignedp && integer_all_onesp (arg1))
10314 arg1 = integer_zero_node, code = GE;
10316 code = unsignedp ? GTU : GT;
10319 if (integer_onep (arg1))
10320 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10322 code = unsignedp ? GEU : GE;
10325 case UNORDERED_EXPR:
10351 /* Put a constant second. */
10352 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10354 tem = arg0; arg0 = arg1; arg1 = tem;
10355 code = swap_condition (code);
10358 /* If this is an equality or inequality test of a single bit, we can
10359 do this by shifting the bit being tested to the low-order bit and
10360 masking the result with the constant 1. If the condition was EQ,
10361 we xor it with 1. This does not require an scc insn and is faster
10362 than an scc insn even if we have it. */
10364 if ((code == NE || code == EQ)
10365 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10366 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10368 tree inner = TREE_OPERAND (arg0, 0);
10369 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10372 /* If INNER is a right shift of a constant and it plus BITNUM does
10373 not overflow, adjust BITNUM and INNER. */
10375 if (TREE_CODE (inner) == RSHIFT_EXPR
10376 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10377 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10378 && bitnum < TYPE_PRECISION (type)
10379 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10380 bitnum - TYPE_PRECISION (type)))
10382 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10383 inner = TREE_OPERAND (inner, 0);
10386 /* If we are going to be able to omit the AND below, we must do our
10387 operations as unsigned. If we must use the AND, we have a choice.
10388 Normally unsigned is faster, but for some machines signed is. */
10389 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10390 #ifdef LOAD_EXTEND_OP
10391 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10397 if (! get_subtarget (subtarget)
10398 || GET_MODE (subtarget) != operand_mode
10399 || ! safe_from_p (subtarget, inner, 1))
10402 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
10405 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
10406 size_int (bitnum), subtarget, ops_unsignedp);
10408 if (GET_MODE (op0) != mode)
10409 op0 = convert_to_mode (mode, op0, ops_unsignedp);
10411 if ((code == EQ && ! invert) || (code == NE && invert))
10412 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10413 ops_unsignedp, OPTAB_LIB_WIDEN);
10415 /* Put the AND last so it can combine with more things. */
10416 if (bitnum != TYPE_PRECISION (type) - 1)
10417 op0 = expand_and (op0, const1_rtx, subtarget);
10422 /* Now see if we are likely to be able to do this. Return if not. */
10423 if (! can_compare_p (code, operand_mode, ccp_store_flag))
10426 icode = setcc_gen_code[(int) code];
10427 if (icode == CODE_FOR_nothing
10428 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
10430 /* We can only do this if it is one of the special cases that
10431 can be handled without an scc insn. */
10432 if ((code == LT && integer_zerop (arg1))
10433 || (! only_cheap && code == GE && integer_zerop (arg1)))
10435 else if (BRANCH_COST >= 0
10436 && ! only_cheap && (code == NE || code == EQ)
10437 && TREE_CODE (type) != REAL_TYPE
10438 && ((abs_optab->handlers[(int) operand_mode].insn_code
10439 != CODE_FOR_nothing)
10440 || (ffs_optab->handlers[(int) operand_mode].insn_code
10441 != CODE_FOR_nothing)))
10447 if (! get_subtarget (target)
10448 || GET_MODE (subtarget) != operand_mode
10449 || ! safe_from_p (subtarget, arg1, 1))
10452 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10453 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10456 target = gen_reg_rtx (mode);
10458 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10459 because, if the emit_store_flag does anything it will succeed and
10460 OP0 and OP1 will not be used subsequently. */
10462 result = emit_store_flag (target, code,
10463 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10464 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10465 operand_mode, unsignedp, 1);
10470 result = expand_binop (mode, xor_optab, result, const1_rtx,
10471 result, 0, OPTAB_LIB_WIDEN);
10475 /* If this failed, we have to do this with set/compare/jump/set code. */
10476 if (GET_CODE (target) != REG
10477 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10478 target = gen_reg_rtx (GET_MODE (target));
10480 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10481 result = compare_from_rtx (op0, op1, code, unsignedp,
10482 operand_mode, NULL_RTX);
10483 if (GET_CODE (result) == CONST_INT)
10484 return (((result == const0_rtx && ! invert)
10485 || (result != const0_rtx && invert))
10486 ? const0_rtx : const1_rtx);
10488 label = gen_label_rtx ();
10489 if (bcc_gen_fctn[(int) code] == 0)
10492 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10493 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10494 emit_label (label);
10500 /* Stubs in case we haven't got a casesi insn. */
10501 #ifndef HAVE_casesi
10502 # define HAVE_casesi 0
10503 # define gen_casesi(a, b, c, d, e) (0)
10504 # define CODE_FOR_casesi CODE_FOR_nothing
10507 /* If the machine does not have a case insn that compares the bounds,
10508 this means extra overhead for dispatch tables, which raises the
10509 threshold for using them. */
10510 #ifndef CASE_VALUES_THRESHOLD
10511 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10512 #endif /* CASE_VALUES_THRESHOLD */
10515 case_values_threshold ()
10517 return CASE_VALUES_THRESHOLD;
10520 /* Attempt to generate a casesi instruction. Returns 1 if successful,
10521 0 otherwise (i.e. if there is no casesi instruction). */
10523 try_casesi (index_type, index_expr, minval, range,
10524 table_label, default_label)
10525 tree index_type, index_expr, minval, range;
10526 rtx table_label ATTRIBUTE_UNUSED;
10529 enum machine_mode index_mode = SImode;
10530 int index_bits = GET_MODE_BITSIZE (index_mode);
10531 rtx op1, op2, index;
10532 enum machine_mode op_mode;
10537 /* Convert the index to SImode. */
10538 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10540 enum machine_mode omode = TYPE_MODE (index_type);
10541 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10543 /* We must handle the endpoints in the original mode. */
10544 index_expr = build (MINUS_EXPR, index_type,
10545 index_expr, minval);
10546 minval = integer_zero_node;
10547 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10548 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10549 omode, 1, default_label);
10550 /* Now we can safely truncate. */
10551 index = convert_to_mode (index_mode, index, 0);
10555 if (TYPE_MODE (index_type) != index_mode)
10557 index_expr = convert (type_for_size (index_bits, 0),
10559 index_type = TREE_TYPE (index_expr);
10562 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10565 index = protect_from_queue (index, 0);
10566 do_pending_stack_adjust ();
10568 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10569 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10571 index = copy_to_mode_reg (op_mode, index);
10573 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10575 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10576 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10577 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10578 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10580 op1 = copy_to_mode_reg (op_mode, op1);
10582 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10584 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10585 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10586 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10587 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10589 op2 = copy_to_mode_reg (op_mode, op2);
10591 emit_jump_insn (gen_casesi (index, op1, op2,
10592 table_label, default_label));
10596 /* Attempt to generate a tablejump instruction; same concept. */
10597 #ifndef HAVE_tablejump
10598 #define HAVE_tablejump 0
10599 #define gen_tablejump(x, y) (0)
10602 /* Subroutine of the next function.
10604 INDEX is the value being switched on, with the lowest value
10605 in the table already subtracted.
10606 MODE is its expected mode (needed if INDEX is constant).
10607 RANGE is the length of the jump table.
10608 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10610 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10611 index value is out of range. */
10614 do_tablejump (index, mode, range, table_label, default_label)
10615 rtx index, range, table_label, default_label;
10616 enum machine_mode mode;
10620 /* Do an unsigned comparison (in the proper mode) between the index
10621 expression and the value which represents the length of the range.
10622 Since we just finished subtracting the lower bound of the range
10623 from the index expression, this comparison allows us to simultaneously
10624 check that the original index expression value is both greater than
10625 or equal to the minimum value of the range and less than or equal to
10626 the maximum value of the range. */
10628 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10631 /* If index is in range, it must fit in Pmode.
10632 Convert to Pmode so we can index with it. */
10634 index = convert_to_mode (Pmode, index, 1);
10636 /* Don't let a MEM slip thru, because then INDEX that comes
10637 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10638 and break_out_memory_refs will go to work on it and mess it up. */
10639 #ifdef PIC_CASE_VECTOR_ADDRESS
10640 if (flag_pic && GET_CODE (index) != REG)
10641 index = copy_to_mode_reg (Pmode, index);
10644 /* If flag_force_addr were to affect this address
10645 it could interfere with the tricky assumptions made
10646 about addresses that contain label-refs,
10647 which may be valid only very near the tablejump itself. */
10648 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10649 GET_MODE_SIZE, because this indicates how large insns are. The other
10650 uses should all be Pmode, because they are addresses. This code
10651 could fail if addresses and insns are not the same size. */
10652 index = gen_rtx_PLUS (Pmode,
10653 gen_rtx_MULT (Pmode, index,
10654 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10655 gen_rtx_LABEL_REF (Pmode, table_label));
10656 #ifdef PIC_CASE_VECTOR_ADDRESS
10658 index = PIC_CASE_VECTOR_ADDRESS (index);
10661 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10662 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10663 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10664 RTX_UNCHANGING_P (vector) = 1;
10665 convert_move (temp, vector, 0);
10667 emit_jump_insn (gen_tablejump (temp, table_label));
10669 /* If we are generating PIC code or if the table is PC-relative, the
10670 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10671 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10676 try_tablejump (index_type, index_expr, minval, range,
10677 table_label, default_label)
10678 tree index_type, index_expr, minval, range;
10679 rtx table_label, default_label;
10683 if (! HAVE_tablejump)
10686 index_expr = fold (build (MINUS_EXPR, index_type,
10687 convert (index_type, index_expr),
10688 convert (index_type, minval)));
10689 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10691 index = protect_from_queue (index, 0);
10692 do_pending_stack_adjust ();
10694 do_tablejump (index, TYPE_MODE (index_type),
10695 convert_modes (TYPE_MODE (index_type),
10696 TYPE_MODE (TREE_TYPE (range)),
10697 expand_expr (range, NULL_RTX,
10699 TREE_UNSIGNED (TREE_TYPE (range))),
10700 table_label, default_label);