/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
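
/* Worked example of the heuristic above (illustrative only, assuming
   the default MOVE_RATIO of 15 and a hypothetical target whose widest
   integer move is 4 bytes): for SIZE == 10 with 4-byte alignment,
   move_by_pieces_ninsns counts 10/4 = 2 SImode moves plus 1 HImode
   move, i.e. 3 insns; since 3 < 15, MOVE_BY_PIECES_P is true and the
   copy is expanded inline instead of via movstr or a libcall.  */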
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
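
/* Illustrative sketch (never compiled): how the queue primitives above
   fit together when expanding something like `a = b++;'.  The names
   `var' and `target' are hypothetical rtx's built elsewhere.  */
#if 0
{
  /* Queue the increment of VAR; BODY is exactly what emit_queue will
     hand to emit_insn later.  */
  rtx queued = enqueue_insn (var, gen_move_insn (var,
						 plus_constant (var, 1)));

  /* Every use of the queued value must go through protect_from_queue,
     so that the pre-increment value is what actually gets stored.  */
  emit_move_insn (target, protect_from_queue (queued, 0));

  /* Flush the pending increment at the end of the statement.  */
  emit_queue ();
}
#endif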
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = gen_rtx_SUBREG (to_mode, from, 0);
      else
	to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
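
/* Illustrative sketch (never compiled): a typical call to the routine
   above.  To widen a QImode pseudo into a fresh SImode pseudo with
   zero extension, a caller would write something like this; the
   variable names are hypothetical.  */
#if 0
{
  rtx narrow = gen_reg_rtx (QImode);
  rtx wide = gen_reg_rtx (SImode);

  convert_move (wide, narrow, 1);	/* 1 = treat NARROW as unsigned.  */
}
#endif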
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (trunc_int_for_mode (val, mode));
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
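
/* Example of the mode descent above (illustrative only, assuming a
   hypothetical target with MOVE_MAX_PIECES == 8 and full alignment):
   for LEN == 11 the loop first emits one DImode move (11 -> 3 bytes
   left); no SImode move fits the remaining 3 bytes, so the next pass
   emits one HImode move (3 -> 1) and the last pass one QImode move
   (1 -> 0).  A misaligned target with SLOW_UNALIGNED_ACCESS would
   instead cap ALIGN and start the descent from a narrower mode.  */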
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    {
	      to1 = replace_equiv_address (data->to, data->to_addr);
	      to1 = adjust_address (to1, mode, 0);
	    }
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	{
	  from1 = replace_equiv_address (data->from, data->from_addr);
	  from1 = adjust_address (from1, mode, 0);
	}
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align / BITS_PER_UNIT);
      enum machine_mode mode;

      /* Since this is a move insn, we don't care about volatility.  */
      volatile_ok = 1;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  volatile_ok = 0;
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      volatile_ok = 0;

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  TREE_NOTHROW (fn) = 1;
	  make_decl_rtl (fn, NULL);
	  assemble_external (fn);
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
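
/* Illustrative sketch (never compiled): copying a 32-byte BLKmode
   object with the routine above.  DST_ADDR and SRC_ADDR are
   hypothetical Pmode address rtx's built elsewhere; the alignment
   argument is in bits, so 64 means a known 8-byte alignment.  */
#if 0
{
  rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
  rtx src = gen_rtx_MEM (BLKmode, src_addr);

  emit_block_move (dst, src, GEN_INT (32), 64);
}
#endif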
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  if (nregs == 0)
    return;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC.

   ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));
	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else
	    abort ();
	}
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, align, ssize);

      if (BYTES_BIG_ENDIAN && shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
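
/* Example of the DST shape expected above (illustrative only): a value
   returned half in a floating register and half in an integer register
   might be described as

     (parallel [(expr_list (reg:DF 32) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   i.e. each element pairs a register with the byte offset of the piece
   it carries within the BLKmode value.  The register numbers here are
   hypothetical.  */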
/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
				     BLKmode);

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  bitsize, BITS_PER_WORD),
		       bitsize, BITS_PER_WORD);
    }

  return tgtblk;
}
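
/* Worked example of the correction above (illustrative only): on a
   hypothetical big-endian target with 32-bit words, a 3-byte struct
   returned in a register gives big_endian_correction = 32 - 3*8 = 8.
   The significant bytes sit at the least significant end of the
   register, so with big-endian bit numbering the loop extracts
   starting at bit 8 of the register (xbitpos) while storing starting
   at bit 0 of the memory block (bitpos).  */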
/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
2282 can_store_by_pieces (len, constfun, constfundata, align)
2283 unsigned HOST_WIDE_INT len;
2284 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2288 unsigned HOST_WIDE_INT max_size, l;
2289 HOST_WIDE_INT offset = 0;
2290 enum machine_mode mode, tmode;
2291 enum insn_code icode;
     rtx cst;

2295 if (! MOVE_BY_PIECES_P (len, align))
       return 0;
2298 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2299 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2300 align = MOVE_MAX * BITS_PER_UNIT;
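     /* Rationale sketch: when unaligned word accesses are cheap, ALIGN is
        raised here to the widest useful value, so the mode-selection loop
        below is free to test word-sized and wider chunks even for a
        nominally byte-aligned destination.  */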
2302 /* We would first store what we can in the largest integer mode, then go to
2303 successively smaller modes. */
     for (reverse = 0;
2306      reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
          reverse++)
2311 max_size = MOVE_MAX_PIECES + 1;
2312 while (max_size > 1)
2314 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2315 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2316   if (GET_MODE_SIZE (tmode) < max_size)
         mode = tmode;

2319 if (mode == VOIDmode)
       break;
2322 icode = mov_optab->handlers[(int) mode].insn_code;
2323 if (icode != CODE_FOR_nothing
2324 && align >= GET_MODE_ALIGNMENT (mode))
2326 unsigned int size = GET_MODE_SIZE (mode);
2333 cst = (*constfun) (constfundata, offset, mode);
2334 if (!LEGITIMATE_CONSTANT_P (cst))
       return 0;
2344 max_size = GET_MODE_SIZE (mode);
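     /* Illustration (sizes assumed): if the widest by-pieces mode is
        DImode, a 15-byte LEN is covered by one DImode, one SImode, one
        HImode and one QImode store, max_size shrinking after each pass of
        the outer loop.  */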
2347 /* The code above should have handled everything.  */
     if (l != 0)
       abort ();

     return 1;
2355 /* Generate several move instructions to store LEN bytes generated by
2356 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2357 pointer which will be passed as argument in every CONSTFUN call.
2358 ALIGN is maximum alignment we can assume. */
     void
2361 store_by_pieces (to, len, constfun, constfundata, align)
2363 unsigned HOST_WIDE_INT len;
2364 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2368 struct store_by_pieces data;
2370 if (! MOVE_BY_PIECES_P (len, align))
       abort ();
2372 to = protect_from_queue (to, 1);
2373 data.constfun = constfun;
2374 data.constfundata = constfundata;
     data.len = len;
     data.to = to;
2377 store_by_pieces_1 (&data, align);
2380 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2381 rtx with BLKmode). The caller must pass TO through protect_from_queue
2382 before calling. ALIGN is maximum alignment we can assume. */
     static void
2385 clear_by_pieces (to, len, align)
2387 unsigned HOST_WIDE_INT len;
2390 struct store_by_pieces data;
2392 data.constfun = clear_by_pieces_1;
2393 data.constfundata = NULL;
     data.len = len;
     data.to = to;
2396 store_by_pieces_1 (&data, align);
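/* Design note: clear_by_pieces simply drives store_by_pieces_1 with a
   constant-generator callback that always yields zero, so clearing and
   constant-filling share one code path.  */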
2399 /* Callback routine for clear_by_pieces.
2400 Return const0_rtx unconditionally. */
     static rtx
2403 clear_by_pieces_1 (data, offset, mode)
2404      PTR data ATTRIBUTE_UNUSED;
2405      HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2406      enum machine_mode mode ATTRIBUTE_UNUSED;
     {
       return const0_rtx;
     }
2411 /* Subroutine of clear_by_pieces and store_by_pieces.
2412 Generate several move instructions to store LEN bytes of block TO. (A MEM
2413 rtx with BLKmode). The caller must pass TO through protect_from_queue
2414 before calling. ALIGN is maximum alignment we can assume. */
     static void
2417 store_by_pieces_1 (data, align)
2418 struct store_by_pieces *data;
2421 rtx to_addr = XEXP (data->to, 0);
2422 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2423 enum machine_mode mode = VOIDmode, tmode;
2424 enum insn_code icode;
2427 data->to_addr = to_addr;
     data->autinc_to
2429   = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2430      || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

2432 data->explicit_inc_to = 0;
     data->reverse
2434   = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
     if (data->reverse)
2436   data->offset = data->len;
2438 /* If storing requires more than two move insns,
2439 copy addresses to registers (to make displacements shorter)
2440 and use post-increment if available. */
2441 if (!data->autinc_to
2442 && move_by_pieces_ninsns (data->len, align) > 2)
2444 /* Determine the main mode we'll be using. */
2445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2447   if (GET_MODE_SIZE (tmode) < max_size)
         mode = tmode;
2450 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2452 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2453 data->autinc_to = 1;
2454 data->explicit_inc_to = -1;
2457 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2458 && ! data->autinc_to)
2460 data->to_addr = copy_addr_to_reg (to_addr);
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = 1;
2465 if (!data->autinc_to && CONSTANT_P (to_addr))
2466 data->to_addr = copy_addr_to_reg (to_addr);
2469 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2470 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2471 align = MOVE_MAX * BITS_PER_UNIT;
2473 /* First store what we can in the largest integer mode, then go to
2474 successively smaller modes. */
2476 while (max_size > 1)
2478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2480   if (GET_MODE_SIZE (tmode) < max_size)
         mode = tmode;

2483 if (mode == VOIDmode)
       break;
2486 icode = mov_optab->handlers[(int) mode].insn_code;
2487 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2488 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2490 max_size = GET_MODE_SIZE (mode);
2493 /* The code above should have handled everything.  */
     if (data->len != 0)
       abort ();
2498 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2499 with move instructions for mode MODE. GENFUN is the gen_... function
2500 to make a move insn for that mode. DATA has all the other info. */
     static void
2503 store_by_pieces_2 (genfun, mode, data)
2504 rtx (*genfun) PARAMS ((rtx, ...));
2505 enum machine_mode mode;
2506      struct store_by_pieces *data;
     {
2508   unsigned int size = GET_MODE_SIZE (mode);
       rtx to1, cst;

2511   while (data->len >= size)
       {
         if (data->reverse)
2514       data->offset -= size;
2516 if (data->autinc_to)
           {
2518         to1 = replace_equiv_address (data->to, data->to_addr);
2519         to1 = adjust_address (to1, mode, 0);
           }
         else
2522       to1 = adjust_address (data->to, mode, data->offset);
2524 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2525 emit_insn (gen_add2_insn (data->to_addr,
2526 GEN_INT (-(HOST_WIDE_INT) size)));
2528 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2529 emit_insn ((*genfun) (to1, cst));
2531 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2532 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2534 if (! data->reverse)
2535       data->offset += size;

         data->len -= size;
       }
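     /* Worked example (values assumed): storing 6 bytes forward with an
        SImode pass then an HImode pass writes at offsets 0 and 4; with
        data->reverse set, the same stores land at offsets 2 and 0, OFFSET
        being decremented before each store instead of incremented after.  */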
2541 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2542 its length in bytes and ALIGN is the maximum alignment we can assume.
2544 If we call a function that returns the length of the block, return it. */
     rtx
2547 clear_storage (object, size, align)
2552 #ifdef TARGET_MEM_FUNCTIONS
2554 tree call_expr, arg_list;
2558 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2559 just move a zero. Otherwise, do this a piece at a time. */
2560 if (GET_MODE (object) != BLKmode
2561 && GET_CODE (size) == CONST_INT
2562 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2563 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
     else
       {
2566     object = protect_from_queue (object, 1);
2567 size = protect_from_queue (size, 0);
2569 if (GET_CODE (size) == CONST_INT
2570 && MOVE_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
         else
           {
2574         /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
             {
2602           rtx last = get_last_insn ();
               rtx pat, op1;
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
               if (pat)
                 {
                   emit_insn (pat);
                   return 0;
                 }
               else
2617             delete_insns_since (last);
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos. We can then place those new pseudos
2633 into an RTL_EXPR and use them later, even after a call to
2636 Note this is not strictly needed for library calls since they
2637 do not call emit_queue before loading their arguments. However,
2638 we may need to have library calls call emit_queue in the future
2639 since failing to do so could cause problems for targets which
2640 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2641 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2643 #ifdef TARGET_MEM_FUNCTIONS
2644 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
2646 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2647                         TREE_UNSIGNED (integer_type_node));
2648 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 /* It is incorrect to use the libcall calling conventions to call
2653 memset in this context.
2655 This could be a user call to memset and the user may wish to
2656 examine the return value from memset.
2658 For targets where libcalls and normal calls have different
2659 conventions for returning pointers, we could end up generating
     incorrect code.

2662 So instead of using a libcall sequence we build up a suitable
2663 CALL_EXPR and expand the call in the normal fashion. */
2664 if (fn == NULL_TREE)
       {
         tree fntype;
2668 /* This was copied from except.c, I don't know if all this is
2669 necessary in this context or not. */
2670 fn = get_identifier ("memset");
2671 fntype = build_pointer_type (void_type_node);
2672 fntype = build_function_type (fntype, NULL_TREE);
2673 fn = build_decl (FUNCTION_DECL, fn, fntype);
2674 ggc_add_tree_root (&fn, 1);
2675 DECL_EXTERNAL (fn) = 1;
2676 TREE_PUBLIC (fn) = 1;
2677 DECL_ARTIFICIAL (fn) = 1;
2678 TREE_NOTHROW (fn) = 1;
2679 make_decl_rtl (fn, NULL);
2680 assemble_external (fn);
2683 /* We need to make an argument list for the function call.
2685 memset has three arguments, the first is a void * address, the
2686 second an integer with the initialization value, and the last is a
2687 size_t byte count for the copy.  */
     arg_list
2689   = build_tree_list (NULL_TREE,
2690                      make_tree (build_pointer_type (void_type_node),
                                     object));
2692 TREE_CHAIN (arg_list)
2693 = build_tree_list (NULL_TREE,
2694 make_tree (integer_type_node, const0_rtx));
2695 TREE_CHAIN (TREE_CHAIN (arg_list))
2696 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2697 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
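     /* The tree built above is just the call
        memset (OBJECT, 0, SIZE) spelled out by hand.  */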
2699 /* Now we have to build up the CALL_EXPR itself. */
2700 call_expr = build1 (ADDR_EXPR,
2701 build_pointer_type (TREE_TYPE (fn)), fn);
2702 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2703 call_expr, arg_list, NULL_TREE);
2704 TREE_SIDE_EFFECTS (call_expr) = 1;
2706 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
2708 emit_library_call (bzero_libfunc, LCT_NORMAL,
2709                    VOIDmode, 2, object, Pmode, size,
2710                    TYPE_MODE (integer_type_node));
#endif
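/* To recap clear_storage's strategy: a single zero move for
   register-sized objects, clear_by_pieces for small constant sizes, a
   clrstr pattern when the target provides one, and a call to memset
   (or bzero) as the final fallback.  */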
2718 /* Generate code to copy Y into X.
2719 Both Y and X must have the same mode, except that
2720 Y can be a constant with VOIDmode.
2721 This mode cannot be BLKmode; use emit_block_move for that.
2723 Return the last instruction emitted. */
     rtx
2726 emit_move_insn (x, y)
2729 enum machine_mode mode = GET_MODE (x);
2730 rtx y_cst = NULL_RTX;
     rtx last_insn;
2733 x = protect_from_queue (x, 1);
2734 y = protect_from_queue (y, 0);
2736 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
       abort ();
2739 /* Never force constant_p_rtx to memory. */
2740 if (GET_CODE (y) == CONSTANT_P_RTX)
       ;
2742 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
       {
         y_cst = y;
2745     y = force_const_mem (mode, y);
       }
2748 /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
2750 if (GET_CODE (x) == MEM
2751 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2752 && ! push_operand (x, GET_MODE (x)))
         || (flag_force_addr
2754         && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2755 x = validize_mem (x);
2757 if (GET_CODE (y) == MEM
2758 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
         || (flag_force_addr
2760         && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2761 y = validize_mem (y);
2763 if (mode == BLKmode)
       abort ();
2766 last_insn = emit_move_insn_1 (x, y);
2768 if (y_cst && GET_CODE (x) == REG)
2769 REG_NOTES (last_insn)
2770     = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));

     return last_insn;
     }
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
     rtx
2780 emit_move_insn_1 (x, y)
2783 enum machine_mode mode = GET_MODE (x);
2784 enum machine_mode submode;
2785 enum mode_class class = GET_MODE_CLASS (mode);
2788 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
       abort ();
2791 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
       return
2793     emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2795 /* Expand complex moves by moving real part and imag part, if possible. */
2796 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2797          && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
                                                       * BITS_PER_UNIT),
2799                                                  (class == MODE_COMPLEX_INT
2800                                                   ? MODE_INT : MODE_FLOAT),
                                                      0))
2802 && (mov_optab->handlers[(int) submode].insn_code
2803 != CODE_FOR_nothing))
2805 /* Don't split destination if it is a stack push. */
2806 int stack = push_operand (x, GET_MODE (x));
2808 #ifdef PUSH_ROUNDING
2809 /* In case we output to the stack, but the size is smaller than the
     machine can push exactly, we need to use move instructions.  */
     if (stack
2812     && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
       {
         rtx temp;
2815     int offset1, offset2;
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp = expand_binop (Pmode,
2820 #ifdef STACK_GROWS_DOWNWARD
                          sub_optab,
#else
                          add_optab,
#endif
                          stack_pointer_rtx,
                          GEN_INT
2827                        (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
                          stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2831 if (temp != stack_pointer_rtx)
2832 emit_move_insn (stack_pointer_rtx, temp);
2833 #ifdef STACK_GROWS_DOWNWARD
         offset1 = 0;
2835     offset2 = GET_MODE_SIZE (submode);
#else
2837     offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2838     offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2839                + GET_MODE_SIZE (submode));
#endif
2841 emit_move_insn (change_address (x, submode,
2842                                     gen_rtx_PLUS (Pmode,
                                                       stack_pointer_rtx,
2844                                                   GEN_INT (offset1))),
2845 gen_realpart (submode, y));
2846 emit_move_insn (change_address (x, submode,
2847                                     gen_rtx_PLUS (Pmode,
                                                       stack_pointer_rtx,
2849                                                   GEN_INT (offset2))),
2850 gen_imagpart (submode, y));
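         /* Illustrative numbers (assuming DCmode with 8-byte DFmode halves
            and no extra rounding): on a downward-growing stack the single
            adjustment above makes room for 16 bytes, the real part lands
            at sp+offset1 == sp+0 and the imaginary part at
            sp+offset2 == sp+8.  */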
2854 /* If this is a stack, push the highpart first, so it
2855 will be in the argument order.
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of machine's endianness. */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_imagpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_realpart (submode, y)));
#else
2871       emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876                  gen_imagpart (submode, y)));
#endif
2881 rtx realpart_x, realpart_y;
2882 rtx imagpart_x, imagpart_y;
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2894 && (reload_in_progress | reload_completed) == 0)
2896 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2897 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2899 if (packed_dest_p || packed_src_p)
2901 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2902 ? MODE_FLOAT : MODE_INT);
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2907 if (reg_mode != BLKmode)
2909 rtx mem = assign_stack_temp (reg_mode,
2910 GET_MODE_SIZE (mode), 0);
2911 rtx cmem = adjust_address (mem, mode, 0);
                 cfun->cannot_inline
2914               = N_("function using short complex types cannot be inline");
                 if (packed_dest_p)
                   {
2918                 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2919 emit_move_insn_1 (cmem, y);
2920 return emit_move_insn_1 (sreg, mem);
                   }
                 else
                   {
2924                 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2925 emit_move_insn_1 (mem, sreg);
2926 return emit_move_insn_1 (x, cmem);
2932 realpart_x = gen_realpart (submode, x);
2933 realpart_y = gen_realpart (submode, y);
2934 imagpart_x = gen_imagpart (submode, x);
2935 imagpart_y = gen_imagpart (submode, y);
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
     if (x != y
2942     && ! (reload_in_progress || reload_completed)
2943 && (GET_CODE (realpart_x) == SUBREG
2944 || GET_CODE (imagpart_x) == SUBREG))
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (realpart_x, realpart_y));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (imagpart_x, imagpart_y));
2955 return get_last_insn ();
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
2961 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2967 #ifdef PUSH_ROUNDING
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x, GET_MODE (x)))
2976 /* Do not use anti_adjust_stack, since we don't want to update
2977 stack_pointer_delta. */
2978 temp = expand_binop (Pmode,
2979 #ifdef STACK_GROWS_DOWNWARD
                          sub_optab,
#else
                          add_optab,
#endif
                          stack_pointer_rtx,
                          GEN_INT
2986                        (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
                          stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2990 if (temp != stack_pointer_rtx)
2991 emit_move_insn (stack_pointer_rtx, temp);
2993 code = GET_CODE (XEXP (x, 0));
2994 /* Just hope that small offsets off SP are OK. */
2995 if (code == POST_INC)
2996 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2997 GEN_INT (-(HOST_WIDE_INT)
2998 GET_MODE_SIZE (GET_MODE (x))));
2999 else if (code == POST_DEC)
3000 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3001 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
         else
3003       temp = stack_pointer_rtx;
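         /* E.g. when X was (mem:DI (post_inc SP)) the push has already
            advanced SP past the value, so TEMP computed above points back
            at the start of the just-pushed words, SP minus the mode
            size.  */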
3005     x = change_address (x, VOIDmode, temp);
       }
#endif
3009 /* If we are in reload, see if either operand is a MEM whose address
3010 is scheduled for replacement. */
3011 if (reload_in_progress && GET_CODE (x) == MEM
3012 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3013 x = replace_equiv_address_nv (x, inner);
3014 if (reload_in_progress && GET_CODE (y) == MEM
3015 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3016 y = replace_equiv_address_nv (y, inner);
     for (i = 0;
3022      i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
          i++)
       {
3025 rtx xpart = operand_subword (x, i, 1, mode);
3026 rtx ypart = operand_subword (y, i, 1, mode);
3028 /* If we can't get a part of Y, put Y into memory if it is a
3029 constant. Otherwise, force it into a register. If we still
3030 can't get a part of Y, abort. */
3031 if (ypart == 0 && CONSTANT_P (y))
3033 y = force_const_mem (mode, y);
3034 ypart = operand_subword (y, i, 1, mode);
3036 else if (ypart == 0)
3037 ypart = operand_subword_force (y, i, mode);
3039     if (xpart == 0 || ypart == 0)
           abort ();
3042 need_clobber |= (GET_CODE (xpart) == SUBREG);
3044 last_insn = emit_move_insn (xpart, ypart);
3047 seq = gen_sequence ();
     end_sequence ();
3050 /* Show the output dies here. This is necessary for SUBREGs
3051 of pseudos since we cannot track their lifetimes correctly;
3052 hard regs shouldn't appear here except as return values.
3053 We never want to emit such a clobber after reload. */
     if (x != y
3055     && ! (reload_in_progress || reload_completed)
3056 && need_clobber != 0)
3058   emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

     emit_insn (seq);

     return last_insn;
3069 /* Pushing data onto the stack. */
3071 /* Push a block of length SIZE (perhaps variable)
3072 and return an rtx to address the beginning of the block.
3073 Note that it is not possible for the value returned to be a QUEUED.
3074 The value may be virtual_outgoing_args_rtx.
3076 EXTRA is the number of bytes of padding to push in addition to SIZE.
3077 BELOW nonzero means this padding comes at low addresses;
3078 otherwise, the padding comes at high addresses. */
     rtx
3081 push_block (size, extra, below)
3087 size = convert_modes (Pmode, ptr_mode, size, 1);
3088 if (CONSTANT_P (size))
3089 anti_adjust_stack (plus_constant (size, extra));
3090 else if (GET_CODE (size) == REG && extra == 0)
3091 anti_adjust_stack (size);
     else
       {
3094     temp = copy_to_mode_reg (Pmode, size);
         if (extra != 0)
3096       temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3097 temp, 0, OPTAB_LIB_WIDEN);
3098     anti_adjust_stack (temp);
       }
3101 #ifndef STACK_GROWS_DOWNWARD
     if (0)
#else
     if (1)
#endif
       {
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
       }
     else
       {
3113     if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes.  */
     static rtx
3132 get_push_address (size)
          int size;
     {
       rtx temp;
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3142 temp = stack_pointer_rtx;
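     /* Example: with a POST_DEC push the stack pointer has already moved
        below the block, so the block of SIZE bytes begins at SP + SIZE;
        with PRE_DEC (the default case above) it begins exactly at SP.  */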
3144 return copy_to_reg (temp);
     }
3147 #ifdef PUSH_ROUNDING
3149 /* Emit single push insn. */
     static void
3152 emit_single_push_insn (mode, x, type)
          rtx x;
3154      enum machine_mode mode;
          tree type;
3158 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3160 enum insn_code icode;
3161 insn_operand_predicate_fn pred;
3163 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3164 /* If there is a push pattern, use it.  Otherwise try the old way of
3165 throwing a MEM representing the push operation to the move expander.  */
3166 icode = push_optab->handlers[(int) mode].insn_code;
3167 if (icode != CODE_FOR_nothing)
3169 if (((pred = insn_data[(int) icode].operand[0].predicate)
3170 && !((*pred) (x, mode))))
3171 x = force_reg (mode, x);
3172     emit_insn (GEN_FCN (icode) (x));
         return;
3175 if (GET_MODE_SIZE (mode) == rounded_size)
3176 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
     else
       {
3179 #ifdef STACK_GROWS_DOWNWARD
3180 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3181 GEN_INT (-(HOST_WIDE_INT)rounded_size));
#else
3183     dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3184                               GEN_INT (rounded_size));
#endif
3186     dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
       }
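     /* Sketch of the padded case: pushing an HImode value where
        PUSH_ROUNDING rounds 2 bytes up to 4 cannot use a plain
        (pre_dec SP) address, so on a downward-growing stack it is spelled
        (pre_modify SP (plus SP -4)) instead.  */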
3189 dest = gen_rtx_MEM (mode, dest_addr);
     if (type != 0)
       {
3193     set_mem_attributes (dest, type, 1);
3194 /* Function incoming arguments may overlap with sibling call
3195 outgoing arguments and we cannot allow reordering of reads
3196 from function arguments with stores to outgoing arguments
3197 of sibling calls. */
3198 set_mem_alias_set (dest, 0);
       }

3200 emit_move_insn (dest, x);
3204 /* Generate code to push X onto the stack, assuming it has mode MODE and
     type TYPE.
3206 MODE is redundant except when X is a CONST_INT (since they don't
     carry mode info).
3208 SIZE is an rtx for the size of data to be copied (in bytes),
3209 needed only if X is BLKmode.
3211 ALIGN (in bits) is maximum alignment we can assume.
3213 If PARTIAL and REG are both nonzero, then copy that many of the first
3214 words of X into registers starting with REG, and push the rest of X.
3215 The amount of space pushed is decreased by PARTIAL words,
3216 rounded *down* to a multiple of PARM_BOUNDARY.
3217 REG must be a hard register in this case.
3218 If REG is zero but PARTIAL is not, take all other actions for an
3219 argument partially in registers, but do not actually load any
     registers.
3222 EXTRA is the amount in bytes of extra space to leave next to this arg.
3223 This is ignored if an argument block has already been allocated.
3225 On a machine that lacks real push insns, ARGS_ADDR is the address of
3226 the bottom of the argument block for this call. We use indexing off there
3227 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3228 argument block has not been preallocated.
3230 ARGS_SO_FAR is the size of args previously pushed for this call.
3232 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3233 for arguments passed in registers. If nonzero, it will be the number
3234 of bytes required. */
3237 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3238 args_addr, args_so_far, reg_parm_stack_space,
3241 enum machine_mode mode;
3250 int reg_parm_stack_space;
3254 enum direction stack_direction
3255 #ifdef STACK_GROWS_DOWNWARD
       = downward;
#else
       = upward;
#endif
3261 /* Decide where to pad the argument: `downward' for below,
3262 `upward' for above, or `none' for don't pad it.
3263 Default is below for small data on big-endian machines; else above. */
3264 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3266 /* Invert direction if stack is post-decrement.  */
3268 if (STACK_PUSH_CODE == POST_DEC)
3269 if (where_pad != none)
3270 where_pad = (where_pad == downward ? upward : downward);
3272 xinner = x = protect_from_queue (x, 0);
3274 if (mode == BLKmode)
3276 /* Copy a block into the stack, entirely or partially. */
3279 int used = partial * UNITS_PER_WORD;
3280 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3288 /* USED is now the # of bytes we need not copy to the stack
3289 because registers will take care of them. */
     if (partial != 0)
3292   xinner = adjust_address (xinner, BLKmode, used);
3294 /* If the partial register-part of the arg counts in its stack size,
3295 skip the part of stack space corresponding to the registers.
3296 Otherwise, start copying to the beginning of the stack space,
3297 by setting SKIP to 0. */
3298 skip = (reg_parm_stack_space == 0) ? 0 : used;
3300 #ifdef PUSH_ROUNDING
3301 /* Do it with several push insns if that doesn't take lots of insns
3302 and if there is no difficulty with push insns that skip bytes
3303 on the stack for alignment purposes. */
     if (args_addr == 0
         && PUSH_ARGS
3306     && GET_CODE (size) == CONST_INT
         && skip == 0
3308     && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3309 /* Here we avoid the case of a structure whose weak alignment
3310 forces many pushes of a small amount of data,
3311 and such small pushes do rounding that causes trouble. */
3312 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3313 || align >= BIGGEST_ALIGNMENT
3314 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3315 == (align / BITS_PER_UNIT)))
3316 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3318 /* Push padding now if padding above and stack grows down,
3319 or if padding below and stack grows up.
3320 But if space already allocated, this has already been done. */
3321 if (extra && args_addr == 0
3322 && where_pad != none && where_pad != stack_direction)
3323 anti_adjust_stack (GEN_INT (extra));
3325 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3327 if (current_function_check_memory_usage && ! in_check_memory_usage)
3331 in_check_memory_usage = 1;
3332 temp = get_push_address (INTVAL (size) - used);
3333 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3334 emit_library_call (chkr_copy_bitmap_libfunc,
3335 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3336 Pmode, XEXP (xinner, 0), Pmode,
3337 GEN_INT (INTVAL (size) - used),
3338 TYPE_MODE (sizetype));
3340 emit_library_call (chkr_set_right_libfunc,
3341 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3342 Pmode, GEN_INT (INTVAL (size) - used),
3343 TYPE_MODE (sizetype),
3344 GEN_INT (MEMORY_USE_RW),
3345 TYPE_MODE (integer_type_node));
3346 in_check_memory_usage = 0;
3350 #endif /* PUSH_ROUNDING */
3354 /* Otherwise make space on the stack and copy the data
3355 to the address of that space. */
3357 /* Deduct words put into registers from the size we must copy. */
3360 if (GET_CODE (size) == CONST_INT)
3361 size = GEN_INT (INTVAL (size) - used);
       else
3363     size = expand_binop (GET_MODE (size), sub_optab, size,
3364                          GEN_INT (used), NULL_RTX, 0,
                              OPTAB_LIB_WIDEN);
3368 /* Get the address of the stack space.
3369 In this case, we do not deal with EXTRA separately.
3370 A single stack adjust will do. */
3373 temp = push_block (size, extra, where_pad == downward);
3376 else if (GET_CODE (args_so_far) == CONST_INT)
3377 temp = memory_address (BLKmode,
3378 plus_constant (args_addr,
3379 skip + INTVAL (args_so_far)));
       else
3381     temp = memory_address (BLKmode,
3382                            plus_constant (gen_rtx_PLUS (Pmode,
                                                             args_addr,
                                                             args_so_far),
                                              skip));
3386 if (current_function_check_memory_usage && ! in_check_memory_usage)
3388 in_check_memory_usage = 1;
3389 target = copy_to_reg (temp);
3390 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3391 emit_library_call (chkr_copy_bitmap_libfunc,
3392 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3394 XEXP (xinner, 0), Pmode,
3395 size, TYPE_MODE (sizetype));
3397 emit_library_call (chkr_set_right_libfunc,
3398 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3400 size, TYPE_MODE (sizetype),
3401 GEN_INT (MEMORY_USE_RW),
3402 TYPE_MODE (integer_type_node));
3403 in_check_memory_usage = 0;
3406 target = gen_rtx_MEM (BLKmode, temp);
       if (type != 0)
         {
3410       set_mem_attributes (target, type, 1);
3411 /* Function incoming arguments may overlap with sibling call
3412 outgoing arguments and we cannot allow reordering of reads
3413 from function arguments with stores to outgoing arguments
3414 of sibling calls. */
3415 set_mem_alias_set (target, 0);
         }

3418   /* TEMP is the address of the block.  Copy the data there.  */
3419 if (GET_CODE (size) == CONST_INT
3420 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3422     move_by_pieces (target, xinner, INTVAL (size), align);
         goto ret;
3427 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3428 enum machine_mode mode;
3430   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
            mode != VOIDmode;
3432        mode = GET_MODE_WIDER_MODE (mode))
3434 enum insn_code code = movstr_optab[(int) mode];
3435 insn_operand_predicate_fn pred;
3437 if (code != CODE_FOR_nothing
3438 && ((GET_CODE (size) == CONST_INT
3439 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3440 <= (GET_MODE_MASK (mode) >> 1)))
3441 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3442 && (!(pred = insn_data[(int) code].operand[0].predicate)
3443 || ((*pred) (target, BLKmode)))
3444 && (!(pred = insn_data[(int) code].operand[1].predicate)
3445 || ((*pred) (xinner, BLKmode)))
3446 && (!(pred = insn_data[(int) code].operand[3].predicate)
3447 || ((*pred) (opalign, VOIDmode))))
3449 rtx op2 = convert_to_mode (mode, size, 1);
3450 rtx last = get_last_insn ();
3453 pred = insn_data[(int) code].operand[2].predicate;
3454 if (pred != 0 && ! (*pred) (op2, mode))
3455 op2 = copy_to_mode_reg (mode, op2);
3457       pat = GEN_FCN ((int) code) (target, xinner,
                                       op2, opalign);
           if (pat)
             {
               emit_insn (pat);
               goto ret;
             }
           else
3465         delete_insns_since (last);
3470 if (!ACCUMULATE_OUTGOING_ARGS)
3472 /* If the source is referenced relative to the stack pointer,
3473 copy it to another register to stabilize it. We do not need
3474 to do this if we know that we won't be changing sp. */
3476 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3477 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3478 temp = copy_to_reg (temp);
3481 /* Make inhibit_defer_pop nonzero around the library call
3482 to force it to pop the bcopy-arguments right away. */
3484 #ifdef TARGET_MEM_FUNCTIONS
3485 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3486 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3487 convert_to_mode (TYPE_MODE (sizetype),
3488 size, TREE_UNSIGNED (sizetype)),
3489 TYPE_MODE (sizetype));
#else
3491     emit_library_call (bcopy_libfunc, LCT_NORMAL,
3492                        VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3493                        convert_to_mode (TYPE_MODE (integer_type_node),
                                            size,
3495                                         TREE_UNSIGNED (integer_type_node)),
3496                        TYPE_MODE (integer_type_node));
#endif
3501 else if (partial > 0)
3503 /* Scalar partly in registers. */
3505 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
     int i;
     int not_stack;
3508 /* # words of start of argument
3509 that we must make space for but need not store. */
3510 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3511 int args_offset = INTVAL (args_so_far);
     int skip;
3514 /* Push padding now if padding above and stack grows down,
3515 or if padding below and stack grows up.
3516 But if space already allocated, this has already been done. */
3517 if (extra && args_addr == 0
3518 && where_pad != none && where_pad != stack_direction)
3519 anti_adjust_stack (GEN_INT (extra));
3521 /* If we make space by pushing it, we might as well push
3522 the real data. Otherwise, we can leave OFFSET nonzero
3523 and leave the space uninitialized.  */
     if (args_addr == 0)
       offset = 0;
3527 /* Now NOT_STACK gets the number of words that we don't need to
3528 allocate on the stack. */
3529 not_stack = partial - offset;
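     /* Illustration (values assumed): with partial == 3 words and a
        two-word PARM_BOUNDARY, offset == 1 when the space was preallocated,
        so not_stack == 2: the stack image starts at word 2, but only words
        from not_stack + offset == 3 onward are actually stored, the rest
        living in registers.  */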
3531 /* If the partial register-part of the arg counts in its stack size,
3532 skip the part of stack space corresponding to the registers.
3533 Otherwise, start copying to the beginning of the stack space,
3534 by setting SKIP to 0. */
3535 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3537 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3538 x = validize_mem (force_const_mem (mode, x));
3540 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3541 SUBREGs of such registers are not allowed. */
3542 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3543 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3544 x = copy_to_reg (x);
3546 /* Loop over all the words allocated on the stack for this arg. */
3547 /* We can do it by words, because any scalar bigger than a word
3548 has a size that is a multiple of a word.  */
3549 #ifndef PUSH_ARGS_REVERSED
3550 for (i = not_stack; i < size; i++)
#else
3552 for (i = size - 1; i >= not_stack; i--)
#endif
3554 if (i >= not_stack + offset)
3555 emit_push_insn (operand_subword_force (x, i, mode),
3556 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                       0, args_addr,
3558                   GEN_INT (args_offset + ((i - not_stack + skip)
                                               * UNITS_PER_WORD)),
3560                   reg_parm_stack_space, alignment_pad);
     else
       {
         rtx addr;
3565     rtx target = NULL_RTX;
         rtx dest;
3568 /* Push padding now if padding above and stack grows down,
3569 or if padding below and stack grows up.
3570 But if space already allocated, this has already been done. */
3571 if (extra && args_addr == 0
3572 && where_pad != none && where_pad != stack_direction)
3573 anti_adjust_stack (GEN_INT (extra));
3575 #ifdef PUSH_ROUNDING
3576 if (args_addr == 0 && PUSH_ARGS)
3577 emit_single_push_insn (mode, x, type);
       else
#endif
         {
3581       if (GET_CODE (args_so_far) == CONST_INT)
             addr
3583           = memory_address (mode,
3584 plus_constant (args_addr,
3585 INTVAL (args_so_far)));
           else
3587         addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                        args_so_far));
3590 dest = gen_rtx_MEM (mode, addr);
           if (type != 0)
             {
3593           set_mem_attributes (dest, type, 1);
3594 /* Function incoming arguments may overlap with sibling call
3595 outgoing arguments and we cannot allow reordering of reads
3596 from function arguments with stores to outgoing arguments
3597 of sibling calls. */
3598 set_mem_alias_set (dest, 0);
             }

3601       emit_move_insn (dest, x);
3605 if (current_function_check_memory_usage && ! in_check_memory_usage)
3607 in_check_memory_usage = 1;
3609 target = get_push_address (GET_MODE_SIZE (mode));
3611 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3612 emit_library_call (chkr_copy_bitmap_libfunc,
3613 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3614 Pmode, XEXP (x, 0), Pmode,
3615 GEN_INT (GET_MODE_SIZE (mode)),
3616 TYPE_MODE (sizetype));
3618 emit_library_call (chkr_set_right_libfunc,
3619 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3620 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3621 TYPE_MODE (sizetype),
3622 GEN_INT (MEMORY_USE_RW),
3623 TYPE_MODE (integer_type_node));
3624 in_check_memory_usage = 0;
 ret:
3629 /* If part should go in registers, copy that part
3630 into the appropriate registers. Do this now, at the end,
3631 since mem-to-mem copies above may do function calls. */
3632 if (partial > 0 && reg != 0)
3634 /* Handle calls that pass values in multiple non-contiguous locations.
3635 The Irix 6 ABI has examples of this. */
3636 if (GET_CODE (reg) == PARALLEL)
3637 emit_group_load (reg, x, -1, align); /* ??? size? */
       else
3639     move_block_to_reg (REGNO (reg), x, partial, mode);
3642 if (extra && args_addr == 0 && where_pad == stack_direction)
3643 anti_adjust_stack (GEN_INT (extra));
3645 if (alignment_pad && args_addr == 0)
3646 anti_adjust_stack (alignment_pad);
3649 /* Return X if X can be used as a subtarget in a sequence of arithmetic
     operations.  */

     static rtx
     get_subtarget (x)
          rtx x;
     {
       return ((x == 0
                /* Only registers can be subtargets.  */
3658            || GET_CODE (x) != REG
3658 || GET_CODE (x) != REG
3659 /* If the register is readonly, it can't be set more than once. */
3660 || RTX_UNCHANGING_P (x)
3661 /* Don't use hard regs to avoid extending their life. */
3662 || REGNO (x) < FIRST_PSEUDO_REGISTER
3663 /* Avoid subtargets inside loops,
3664 since they hide some invariant expressions. */
3665            || preserve_subexpressions_p ())
               ? 0 : x);
     }
3669 /* Expand an assignment that stores the value of FROM into TO.
3670 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3671 (This may contain a QUEUED rtx;
3672 if the value is constant, this rtx is a constant.)
3673 Otherwise, the returned value is NULL_RTX.
3675 SUGGEST_REG is no longer actually used.
3676 It used to mean, copy the value through a register
3677 and return that register, if that is possible.
3678 We now use WANT_VALUE to decide whether to do this. */
     rtx
3681 expand_assignment (to, from, want_value, suggest_reg)
3684 int suggest_reg ATTRIBUTE_UNUSED;
3686 register rtx to_rtx = 0;
     rtx result;
3689 /* Don't crash if the lhs of the assignment was erroneous. */
3691 if (TREE_CODE (to) == ERROR_MARK)
3693 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3694 return want_value ? result : NULL_RTX;
3697 /* Assignment of a structure component needs special treatment
3698 if the structure component's rtx is not simply a MEM.
3699 Assignment of an array element at a constant index, and assignment of
3700 an array element in an unaligned packed structure field, has the same
3703 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3704 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3706 enum machine_mode mode1;
3707 HOST_WIDE_INT bitsize, bitpos;
       tree offset;
       int unsignedp;
       int volatilep = 0;
       tree tem;
3712   unsigned int alignment;

       push_temp_slots ();
3715   tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3716 &unsignedp, &volatilep, &alignment);
3718 /* If we are going to use store_bit_field and extract_bit_field,
3719 make sure to_rtx will be safe for multiple use. */
3721 if (mode1 == VOIDmode && want_value)
3722 tem = stabilize_reference (tem);
3724 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
       if (offset != 0)
         {
3727       rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

3729       if (GET_CODE (to_rtx) != MEM)
             abort ();
3732       if (GET_MODE (offset_rtx) != ptr_mode)
             {
3734 #ifdef POINTERS_EXTEND_UNSIGNED
3735           offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
3737           offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
             }
3741 /* A constant address in TO_RTX can have VOIDmode, we must not try
3742 to call force_reg for that case. Avoid that case. */
3743 if (GET_CODE (to_rtx) == MEM
3744 && GET_MODE (to_rtx) == BLKmode
3745 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
               && bitsize > 0
3747           && (bitpos % bitsize) == 0
3748 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3749 && alignment == GET_MODE_ALIGNMENT (mode1))
             {
               rtx temp
3752             = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3754           if (GET_CODE (XEXP (temp, 0)) == REG)
                 to_rtx = temp;
               else
3757             to_rtx = (replace_equiv_address
3758 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
                                       XEXP (temp, 0))));
               bitpos = 0;
             }
           else
3763         to_rtx = change_address (to_rtx, VOIDmode,
3764 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3765                                               force_reg (ptr_mode,
                                                              offset_rtx)));
         }

       if (volatilep)
         {
3771       if (GET_CODE (to_rtx) == MEM)
             {
3773 /* When the offset is zero, to_rtx is the address of the
3774 structure we are storing into, and hence may be shared.
3775 We must make a new MEM before setting the volatile bit. */
               if (offset == 0)
3777             to_rtx = copy_rtx (to_rtx);

3779           MEM_VOLATILE_P (to_rtx) = 1;
             }
3781 #if 0 /* This was turned off because, when a field is volatile
3782 in an object which is not volatile, the object may be in a register,
3783   and then we would abort over here.  */
           else
             abort ();
#endif
         }
3789 if (TREE_CODE (to) == COMPONENT_REF
3790 && TREE_READONLY (TREE_OPERAND (to, 1)))
         {
           if (offset == 0)
3793         to_rtx = copy_rtx (to_rtx);

3795       RTX_UNCHANGING_P (to_rtx) = 1;
         }
3798 /* Check the access. */
3799 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
         {
           rtx to_addr;
           int size;
           int best_mode_size;
3804       enum machine_mode best_mode;
3806       best_mode = get_best_mode (bitsize, bitpos,
3807                                  TYPE_ALIGN (TREE_TYPE (tem)),
                                      VOIDmode, volatilep);
3809       if (best_mode == VOIDmode)
             best_mode = QImode;
3812 best_mode_size = GET_MODE_BITSIZE (best_mode);
3813 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3814 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3815 size *= GET_MODE_SIZE (best_mode);
3817 /* Check the access right of the pointer. */
3818 in_check_memory_usage = 1;
3820 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3821 VOIDmode, 3, to_addr, Pmode,
3822 GEN_INT (size), TYPE_MODE (sizetype),
3823 GEN_INT (MEMORY_USE_WO),
3824 TYPE_MODE (integer_type_node));
3825 in_check_memory_usage = 0;
3828 /* If this is a varying-length object, we must get the address of
3829 the source and do an explicit block move. */
       if (mode1 == VOIDmode)
         {
3832       unsigned int from_align;
3833       rtx from_rtx = expand_expr_unaligned (from, &from_align);
           rtx inner_to_rtx
3835         = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3837 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3838                        MIN (alignment, from_align));
           free_temp_slots ();
           pop_temp_slots ();
           return to_rtx;
         }
3845 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                             (want_value
                              /* Spurious cast for HPUX compiler.  */
3848                          ? ((enum machine_mode)
3849                             TYPE_MODE (TREE_TYPE (to)))
                              : VOIDmode),
                             unsignedp, alignment,
3853                        int_size_in_bytes (TREE_TYPE (tem)),
3854 get_alias_set (to));
3856   preserve_temp_slots (result);
       free_temp_slots ();
       pop_temp_slots ();
3860 /* If the value is meaningful, convert RESULT to the proper mode.
3861 Otherwise, return nothing. */
3862 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3863                                       TYPE_MODE (TREE_TYPE (from)),
                                           result,
3865                                       TREE_UNSIGNED (TREE_TYPE (to)))
               : NULL_RTX);
     }
3870 /* If the rhs is a function call and its value is not an aggregate,
3871 call the function before we start to compute the lhs.
3872 This is needed for correct code for cases such as
3873 val = setjmp (buf) on machines where reference to val
3874 requires loading up part of an address in a separate insn.
3876 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3877 since it might be a promoted variable where the zero- or sign- extension
3878 needs to be done. Handling this in the normal way is safe because no
3879 computation is done before the call. */
3880 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3881 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3882 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3883 && GET_CODE (DECL_RTL (to)) == REG))
       {
         rtx value;

         push_temp_slots ();
3888     value = expand_expr (from, NULL_RTX, VOIDmode, 0);
         if (to_rtx == 0)
3890       to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3892 /* Handle calls that return values in multiple non-contiguous locations.
3893 The Irix 6 ABI has examples of this. */
3894 if (GET_CODE (to_rtx) == PARALLEL)
3895 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3896 TYPE_ALIGN (TREE_TYPE (from)));
3897 else if (GET_MODE (to_rtx) == BLKmode)
3898 emit_block_move (to_rtx, value, expr_size (from),
3899 TYPE_ALIGN (TREE_TYPE (from)));
         else
           {
3902 #ifdef POINTERS_EXTEND_UNSIGNED
3903 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3904 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3905 value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
3907         emit_move_insn (to_rtx, value);
           }
3909     preserve_temp_slots (to_rtx);
         free_temp_slots ();
         pop_temp_slots ();
3912 return want_value ? to_rtx : NULL_RTX;
3915 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3916 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
     if (to_rtx == 0)
       {
3920     to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3921 if (GET_CODE (to_rtx) == MEM)
3922       set_mem_alias_set (to_rtx, get_alias_set (to));
       }
3925 /* Don't move directly into a return register. */
3926 if (TREE_CODE (to) == RESULT_DECL
3927 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
       {
         rtx temp;

         push_temp_slots ();
3932     temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3934 if (GET_CODE (to_rtx) == PARALLEL)
3935 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3936 TYPE_ALIGN (TREE_TYPE (from)));
         else
3938       emit_move_insn (to_rtx, temp);
3940     preserve_temp_slots (to_rtx);
         free_temp_slots ();
         pop_temp_slots ();
3943 return want_value ? to_rtx : NULL_RTX;
3946 /* In case we are returning the contents of an object which overlaps
3947 the place the value is being stored, use a safe function when copying
3948 a value through a pointer into a structure value return block. */
3949 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3950 && current_function_returns_struct
3951 && !current_function_returns_pcc_struct)
       {
         rtx from_rtx, size;

         push_temp_slots ();
3956     size = expr_size (from);
3957 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3958 EXPAND_MEMORY_USE_DONT);
3960 /* Copy the rights of the bitmap. */
3961 if (current_function_check_memory_usage)
3962 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3963 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3964 XEXP (from_rtx, 0), Pmode,
3965 convert_to_mode (TYPE_MODE (sizetype),
3966 size, TREE_UNSIGNED (sizetype)),
3967 TYPE_MODE (sizetype));
3969 #ifdef TARGET_MEM_FUNCTIONS
3970 emit_library_call (memmove_libfunc, LCT_NORMAL,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
3974 size, TREE_UNSIGNED (sizetype)),
3975 TYPE_MODE (sizetype));
#else
3977     emit_library_call (bcopy_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3979 XEXP (to_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (integer_type_node),
3981 size, TREE_UNSIGNED (integer_type_node)),
3982 TYPE_MODE (integer_type_node));
#endif

3985     preserve_temp_slots (to_rtx);
         free_temp_slots ();
         pop_temp_slots ();
3988 return want_value ? to_rtx : NULL_RTX;
3991 /* Compute FROM and store the value in the rtx we got. */
     push_temp_slots ();
3994 result = store_expr (from, to_rtx, want_value);
3995 preserve_temp_slots (result);
     free_temp_slots ();
     pop_temp_slots ();
3998 return want_value ? result : NULL_RTX;
4001 /* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
4003 TARGET may contain a QUEUED rtx.
4005 If WANT_VALUE is nonzero, return a copy of the value
4006 not in TARGET, so that we can be sure to use the proper
4007 value in a containing expression even if TARGET has something
4008 else stored in it. If possible, we copy the value through a pseudo
4009 and return that pseudo. Or, if the value is constant, we try to
4010 return the constant. In some cases, we return a pseudo
4011 copied *from* TARGET.
4013 If the mode is BLKmode then we may return TARGET itself.
4014 It turns out that in BLKmode it doesn't cause a problem,
4015 because C has no operators that could combine two different
4016 assignments into the same BLKmode object with different values
4017 with no sequence point.  Will other languages need this to
     be fixed?
4020 If WANT_VALUE is 0, we return NULL, to make sure
4021 to catch quickly any cases where the caller uses the value
4022 and fails to set WANT_VALUE. */
     rtx
4025 store_expr (exp, target, want_value)
4027 register rtx target;
4031 int dont_return_target = 0;
4032 int dont_store_target = 0;
4034 if (TREE_CODE (exp) == COMPOUND_EXPR)
4036   /* Perform first part of compound expression, then assign from second
          part.  */
4038   expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
       emit_queue ();
4040   return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4042 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4044 /* For conditional expression, get safe form of the target. Then
4045 test the condition, doing the appropriate assignment on either
4046 side. This avoids the creation of unnecessary temporaries.
4047 For non-BLKmode, it is more efficient not to do this. */
4049 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
       emit_queue ();
4052   target = protect_from_queue (target, 1);

4054   do_pending_stack_adjust ();
       NO_DEFER_POP;
4056   jumpifnot (TREE_OPERAND (exp, 0), lab1);
4057 start_cleanup_deferral ();
4058 store_expr (TREE_OPERAND (exp, 1), target, 0);
4059   end_cleanup_deferral ();
       emit_queue ();
4061   emit_jump_insn (gen_jump (lab2));
       emit_barrier ();
       emit_label (lab1);
4064   start_cleanup_deferral ();
4065 store_expr (TREE_OPERAND (exp, 2), target, 0);
4066   end_cleanup_deferral ();
       emit_queue ();
       emit_label (lab2);
       OK_DEFER_POP;

4071   return want_value ? target : NULL_RTX;
4073 else if (queued_subexp_p (target))
4074 /* If target contains a postincrement, let's not risk
4075 using it as the place to generate the rhs. */
4077   if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
         {
4079 /* Expand EXP into a new pseudo. */
4080 temp = gen_reg_rtx (GET_MODE (target));
4081 temp = expand_expr (exp, temp, GET_MODE (target), 0);
         }
       else
4084     temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4086 /* If target is volatile, ANSI requires accessing the value
4087 *from* the target, if it is accessed. So make that happen.
4088 In no case return the target itself. */
4089 if (! MEM_VOLATILE_P (target) && want_value)
4090 dont_return_target = 1;
4092 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4093 && GET_MODE (target) != BLKmode)
4094 /* If target is in memory and caller wants value in a register instead,
4095 arrange that. Pass TARGET as target for expand_expr so that,
4096 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4097 We know expand_expr will not use the target in that case.
4098 Don't do this if TARGET is volatile because we are supposed
4099 to write it and then read it. */
4101 temp = expand_expr (exp, target, GET_MODE (target), 0);
4102   if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
         {
4104 /* If TEMP is already in the desired TARGET, only copy it from
4105 memory and don't store it there again. */
           if (temp == target
4107           || (rtx_equal_p (temp, target)
4108 && ! side_effects_p (temp) && ! side_effects_p (target)))
4109 dont_store_target = 1;
4110       temp = copy_to_reg (temp);
         }
4112   dont_return_target = 1;
4114 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4115 /* If this is an scalar in a register that is stored in a wider mode
4116 than the declared mode, compute the result into its declared mode
4117 and then convert to the wider mode. Our value is the computed
4120 /* If we don't want a value, we can do the conversion inside EXP,
4121 which will often result in some optimizations. Do the conversion
4122 in two steps: first change the signedness, if needed, then
4123 the extend. But don't do this if the type of EXP is a subtype
4124 of something else since then the conversion might involve
4125 more than just converting modes. */
4126 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4127       && TREE_TYPE (TREE_TYPE (exp)) == 0)
         {
4129 if (TREE_UNSIGNED (TREE_TYPE (exp))
4130 != SUBREG_PROMOTED_UNSIGNED_P (target))
             exp
               = convert
4133             (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
                                           TREE_TYPE (exp)),
                  exp);

4137       exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4138                      SUBREG_PROMOTED_UNSIGNED_P (target)),
                          exp);
         }

4142   temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4144 /* If TEMP is a volatile MEM and we want a result value, make
4145 the access now so it gets done only once. Likewise if
4146 it contains TARGET. */
4147 if (GET_CODE (temp) == MEM && want_value
4148 && (MEM_VOLATILE_P (temp)
4149 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4150 temp = copy_to_reg (temp);
4152 /* If TEMP is a VOIDmode constant, use convert_modes to make
4153 sure that we properly convert it. */
4154 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4155 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4156 TYPE_MODE (TREE_TYPE (exp)), temp,
4157 SUBREG_PROMOTED_UNSIGNED_P (target));
4159 convert_move (SUBREG_REG (target), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4162 /* If we promoted a constant, change the mode back down to match
4163 target. Otherwise, the caller might get confused by a result whose
4164 mode is larger than expected. */
4166 if (want_value && GET_MODE (temp) != GET_MODE (target)
4167 && GET_MODE (temp) != VOIDmode)
4169 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4170 SUBREG_PROMOTED_VAR_P (temp) = 1;
4171 SUBREG_PROMOTED_UNSIGNED_P (temp)
4172 = SUBREG_PROMOTED_UNSIGNED_P (target);
4175 return want_value ? temp : NULL_RTX;
     else
       {
4179     temp = expand_expr (exp, target, GET_MODE (target), 0);
4180 /* Return TARGET if it's a specified hardware register.
4181 If TARGET is a volatile mem ref, either return TARGET
4182 or return a reg copied *from* TARGET; ANSI requires this.
4184 Otherwise, if TEMP is not TARGET, return TEMP
4185 if it is constant (for efficiency),
4186 or if we really want the correct value. */
4187 if (!(target && GET_CODE (target) == REG
4188 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4189 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4190 && ! rtx_equal_p (temp, target)
4191 && (CONSTANT_P (temp) || want_value))
4192       dont_return_target = 1;
       }
4195 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4196 the same as that of TARGET, adjust the constant. This is needed, for
4197 example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
4199 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4200 && TREE_CODE (exp) != ERROR_MARK
4201 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4202 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4203 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4205 if (current_function_check_memory_usage
4206 && GET_CODE (target) == MEM
4207 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4209 in_check_memory_usage = 1;
4210 if (GET_CODE (temp) == MEM)
4211 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4212 VOIDmode, 3, XEXP (target, 0), Pmode,
4213 XEXP (temp, 0), Pmode,
4214 expr_size (exp), TYPE_MODE (sizetype));
4216 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4217 VOIDmode, 3, XEXP (target, 0), Pmode,
4218 expr_size (exp), TYPE_MODE (sizetype),
4219 GEN_INT (MEMORY_USE_WO),
4220 TYPE_MODE (integer_type_node));
4221 in_check_memory_usage = 0;
4224 /* If value was not generated in the target, store it there.
4225 Convert the value to TARGET's type first if necessary.  */
4226 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4227 one or both of them are volatile memory refs, we have to distinguish
4229 - expand_expr has used TARGET. In this case, we must not generate
4230 another copy.  This can be detected by TARGET being equal according
     to == .
4232 - expand_expr has not used TARGET - that means that the source just
4233 happens to have the same RTX form. Since temp will have been created
4234 by expand_expr, it will compare unequal according to == .
4235 We must generate a copy in this case, to reach the correct number
4236 of volatile memory references. */
4238 if ((! rtx_equal_p (temp, target)
4239 || (temp != target && (side_effects_p (temp)
4240 || side_effects_p (target))))
4241 && TREE_CODE (exp) != ERROR_MARK
4242 && ! dont_store_target)
4244 target = protect_from_queue (target, 1);
4245 if (GET_MODE (temp) != GET_MODE (target)
4246 && GET_MODE (temp) != VOIDmode)
4248 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4249 if (dont_return_target)
4251 /* In this case, we will return TEMP,
4252 so make sure it has the proper mode.
4253 But don't forget to store the value into TARGET. */
4254 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4255 emit_move_insn (target, temp);
4258 else convert_move (target, temp, unsignedp);
4261 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4263 /* Handle copying a string constant into an array.
4264 The string constant may be shorter than the array.
4265 So copy just the string's actual length, and clear the rest. */
4269 /* Get the size of the data type of the string,
4270 which is actually the size of the target. */
4271 size = expr_size (exp);
4272 if (GET_CODE (size) == CONST_INT
4273 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4274 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4277 /* Compute the size of the data to copy from the string. */
4279 tree copy_size = size_binop (MIN_EXPR,
4280 make_tree (sizetype, size),
4281 size_int (TREE_STRING_LENGTH (exp)));
4282 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4283 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4287 /* Copy that much. */
4288 emit_block_move (target, temp, copy_size_rtx,
4289 TYPE_ALIGN (TREE_TYPE (exp)));
4291 /* Figure out how much is left in TARGET that we have to clear.
4292 Do all calculations in ptr_mode. */
4294 addr = XEXP (target, 0);
4295 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4297 if (GET_CODE (copy_size_rtx) == CONST_INT)
4299 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4300 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4302 align = MIN (align, (unsigned int) (BITS_PER_UNIT
4303 * (INTVAL (copy_size_rtx)
4304 & - INTVAL (copy_size_rtx))));
4308 addr = force_reg (ptr_mode, addr);
4309 addr = expand_binop (ptr_mode, add_optab, addr,
4310 copy_size_rtx, NULL_RTX, 0,
4313 size = expand_binop (ptr_mode, sub_optab, size,
4314 copy_size_rtx, NULL_RTX, 0,
4317 align = BITS_PER_UNIT;
4318 label = gen_label_rtx ();
4319 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4320 GET_MODE (size), 0, 0, label);
4322 align = MIN (align, expr_align (copy_size));
4324 if (size != const0_rtx)
4326 rtx dest = gen_rtx_MEM (BLKmode, addr);
4328 MEM_COPY_ATTRIBUTES (dest, target);
4330 /* Be sure we can write on ADDR. */
4331 in_check_memory_usage = 1;
4332 if (current_function_check_memory_usage)
4333 emit_library_call (chkr_check_addr_libfunc,
4334 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4336 size, TYPE_MODE (sizetype),
4337 GEN_INT (MEMORY_USE_WO),
4338 TYPE_MODE (integer_type_node));
4339 in_check_memory_usage = 0;
4340 clear_storage (dest, size, align);
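/* Editorial sketch, not from the original sources: at the library level the
   string-constant case above is a bounded copy followed by a clear of the
   tail.  A hypothetical host-side equivalent:  */
#if 0
#include <string.h>

static void
init_array_from_string (char *target, size_t target_size,
                        const char *str, size_t str_len)
{
  size_t copy_size = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy_size);               /* emit_block_move analogue */
  if (copy_size < target_size)                   /* anything left in TARGET? */
    memset (target + copy_size, 0, target_size - copy_size); /* clear_storage */
}
#endif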
4347 /* Handle calls that return values in multiple non-contiguous locations.
4348 The Irix 6 ABI has examples of this. */
4349 else if (GET_CODE (target) == PARALLEL)
4350 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4351 TYPE_ALIGN (TREE_TYPE (exp)));
4352 else if (GET_MODE (temp) == BLKmode)
4353 emit_block_move (target, temp, expr_size (exp),
4354 TYPE_ALIGN (TREE_TYPE (exp)));
4356 else emit_move_insn (target, temp);
4359 /* If we don't want a value, return NULL_RTX. */
4363 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4364 ??? The latter test doesn't seem to make sense. */
4365 else if (dont_return_target && GET_CODE (temp) != MEM)
4368 /* Return TARGET itself if it is a hard register. */
4369 else if (want_value && GET_MODE (target) != BLKmode
4370 && ! (GET_CODE (target) == REG
4371 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4372 return copy_to_reg (target);
4378 /* Return 1 if EXP just contains zeros. */
4386 switch (TREE_CODE (exp))
4390 case NON_LVALUE_EXPR:
4391 return is_zeros_p (TREE_OPERAND (exp, 0));
4394 return integer_zerop (exp);
4398 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4401 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4404 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4405 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4406 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4407 if (! is_zeros_p (TREE_VALUE (elt)))
4417 /* Return 1 if EXP contains mostly (3/4) zeros. */
4420 mostly_zeros_p (exp)
4423 if (TREE_CODE (exp) == CONSTRUCTOR)
4425 int elts = 0, zeros = 0;
4426 tree elt = CONSTRUCTOR_ELTS (exp);
4427 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4429 /* If there are no ranges of true bits, it is all zero. */
4430 return elt == NULL_TREE;
4432 for (; elt; elt = TREE_CHAIN (elt))
4434 /* We do not handle the case where the index is a RANGE_EXPR,
4435 so the statistic will be somewhat inaccurate.
4436 We do make a more accurate count in store_constructor itself,
4437 and since this function is only used for nested array elements,
4438 this should be close enough. */
4439 if (mostly_zeros_p (TREE_VALUE (elt)))
4444 return 4 * zeros >= 3 * elts;
4447 return is_zeros_p (exp);
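/* Editorial worked example: the 4 * zeros >= 3 * elts test above is the
   divisionless form of zeros / elts >= 3/4.  A nested constructor with 16
   elements, 12 of them zero, gives 4 * 12 = 48 >= 3 * 16 = 48 and counts
   as mostly zero; with only 11 zeros it gives 44 >= 48, which fails.  */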
4450 /* Helper function for store_constructor.
4451 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4452 TYPE is the type of the CONSTRUCTOR, not the element type.
4453 ALIGN and CLEARED are as for store_constructor.
4454 ALIAS_SET is the alias set to use for any stores.
4456 This provides a recursive shortcut back to store_constructor when it isn't
4457 necessary to go through store_field. This is so that we can pass through
4458 the cleared field to let store_constructor know that we may not have to
4459 clear a substructure if the outer structure has already been cleared. */
4462 store_constructor_field (target, bitsize, bitpos,
4463 mode, exp, type, align, cleared, alias_set)
4465 unsigned HOST_WIDE_INT bitsize;
4466 HOST_WIDE_INT bitpos;
4467 enum machine_mode mode;
4473 if (TREE_CODE (exp) == CONSTRUCTOR
4474 && bitpos % BITS_PER_UNIT == 0
4475 /* If we have a non-zero bitpos for a register target, then we just
4476 let store_field do the bitfield handling. This is unlikely to
4477 generate unnecessary clear instructions anyways. */
4478 && (bitpos == 0 || GET_CODE (target) == MEM))
4482 = adjust_address (target,
4483 GET_MODE (target) == BLKmode
4485 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4486 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4489 /* Show the alignment may no longer be what it was and update the alias
4490 set, if required. */
4492 align = MIN (align, (unsigned int) bitpos & - bitpos);
4493 if (GET_CODE (target) == MEM)
4494 set_mem_alias_set (target, alias_set);
4496 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4499 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4500 int_size_in_bytes (type), alias_set);
4503 /* Store the value of constructor EXP into the rtx TARGET.
4504 TARGET is either a REG or a MEM.
4505 ALIGN is the maximum known alignment for TARGET.
4506 CLEARED is true if TARGET is known to have been zero'd.
4507 SIZE is the number of bytes of TARGET we are allowed to modify: this
4508 may not be the same as the size of EXP if we are assigning to a field
4509 which has been packed to exclude padding bits. */
4512 store_constructor (exp, target, align, cleared, size)
4519 tree type = TREE_TYPE (exp);
4520 #ifdef WORD_REGISTER_OPERATIONS
4521 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4524 /* We know our target cannot conflict, since safe_from_p has been called. */
4526 /* Don't try copying piece by piece into a hard register
4527 since that is vulnerable to being clobbered by EXP.
4528 Instead, construct in a pseudo register and then copy it all. */
4529 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4531 rtx temp = gen_reg_rtx (GET_MODE (target));
4532 store_constructor (exp, temp, align, cleared, size);
4533 emit_move_insn (target, temp);
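/* Editorial sketch, not part of the original file: the hazard motivating
   the pseudo-register detour above is ordinary aliasing.  Built piecewise
   into its destination, this hypothetical swap would read a clobbered
   field; built into a temporary first, it is always safe.  */
#if 0
struct pair { int a, b; };

static void
swap_fields (struct pair *p)    /* p = { p->b, p->a } */
{
  struct pair tmp;              /* the pseudo register above */
  tmp.a = p->b;                 /* construct piecewise into the temporary */
  tmp.b = p->a;
  *p = tmp;                     /* one final move, as emit_move_insn does */
}
#endif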
4538 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4539 || TREE_CODE (type) == QUAL_UNION_TYPE)
4543 /* Inform later passes that the whole union value is dead. */
4544 if ((TREE_CODE (type) == UNION_TYPE
4545 || TREE_CODE (type) == QUAL_UNION_TYPE)
4548 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4550 /* If the constructor is empty, clear the union. */
4551 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4552 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4555 /* If we are building a static constructor into a register,
4556 set the initial value as zero so we can fold the value into
4557 a constant. But if more than one register is involved,
4558 this probably loses. */
4559 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4560 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4563 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4568 /* If the constructor has fewer fields than the structure
4569 or if we are initializing the structure to mostly zeros,
4570 clear the whole structure first. Don't do this if TARGET is a
4571 register whose mode size isn't equal to SIZE since clear_storage
4572 can't handle this case. */
4574 && ((list_length (CONSTRUCTOR_ELTS (exp))
4575 != fields_length (type))
4576 || mostly_zeros_p (exp))
4577 && (GET_CODE (target) != REG
4578 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4581 clear_storage (target, GEN_INT (size), align);
4586 /* Inform later passes that the old value is dead. */
4587 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4589 /* Store each element of the constructor into
4590 the corresponding field of TARGET. */
4592 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4594 register tree field = TREE_PURPOSE (elt);
4595 #ifdef WORD_REGISTER_OPERATIONS
4596 tree value = TREE_VALUE (elt);
4598 register enum machine_mode mode;
4599 HOST_WIDE_INT bitsize;
4600 HOST_WIDE_INT bitpos = 0;
4603 rtx to_rtx = target;
4605 /* Just ignore missing fields.
4606 We cleared the whole structure, above,
4607 if any fields are missing. */
4611 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4614 if (host_integerp (DECL_SIZE (field), 1))
4615 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4619 unsignedp = TREE_UNSIGNED (field);
4620 mode = DECL_MODE (field);
4621 if (DECL_BIT_FIELD (field))
4624 offset = DECL_FIELD_OFFSET (field);
4625 if (host_integerp (offset, 0)
4626 && host_integerp (bit_position (field), 0))
4628 bitpos = int_bit_position (field);
4632 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4638 if (contains_placeholder_p (offset))
4639 offset = build (WITH_RECORD_EXPR, sizetype,
4640 offset, make_tree (TREE_TYPE (exp), target));
4642 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4643 if (GET_CODE (to_rtx) != MEM)
4646 if (GET_MODE (offset_rtx) != ptr_mode)
4648 #ifdef POINTERS_EXTEND_UNSIGNED
4649 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4651 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4656 = change_address (to_rtx, VOIDmode,
4657 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4658 force_reg (ptr_mode,
4660 align = DECL_OFFSET_ALIGN (field);
4663 if (TREE_READONLY (field))
4665 if (GET_CODE (to_rtx) == MEM)
4666 to_rtx = copy_rtx (to_rtx);
4668 RTX_UNCHANGING_P (to_rtx) = 1;
4671 #ifdef WORD_REGISTER_OPERATIONS
4672 /* If this initializes a field that is smaller than a word, at the
4673 start of a word, try to widen it to a full word.
4674 This special case allows us to output C++ member function
4675 initializations in a form that the optimizers can understand. */
4676 if (GET_CODE (target) == REG
4677 && bitsize < BITS_PER_WORD
4678 && bitpos % BITS_PER_WORD == 0
4679 && GET_MODE_CLASS (mode) == MODE_INT
4680 && TREE_CODE (value) == INTEGER_CST
4682 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4684 tree type = TREE_TYPE (value);
4685 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4687 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4688 value = convert (type, value);
4690 if (BYTES_BIG_ENDIAN)
4692 = fold (build (LSHIFT_EXPR, type, value,
4693 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4694 bitsize = BITS_PER_WORD;
4698 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4699 TREE_VALUE (elt), type, align, cleared,
4700 (DECL_NONADDRESSABLE_P (field)
4701 && GET_CODE (to_rtx) == MEM)
4702 ? MEM_ALIAS_SET (to_rtx)
4703 : get_alias_set (TREE_TYPE (field)));
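/* Editorial sketch: the WORD_REGISTER_OPERATIONS widening above pre-shifts
   a narrow constant so a full-word store puts the field in the right bits.
   A host-side analogue, assuming a 32-bit word for BITS_PER_WORD and
   bitsize < 32 as the guard above ensures:  */
#if 0
#include <stdint.h>

static uint32_t
widen_field_to_word (uint32_t value, unsigned bitsize, int bytes_big_endian)
{
  /* Big-endian: the field occupies the high-order bits of the word.  */
  return bytes_big_endian ? value << (32 - bitsize) : value;
}
#endif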
4706 else if (TREE_CODE (type) == ARRAY_TYPE)
4711 tree domain = TYPE_DOMAIN (type);
4712 tree elttype = TREE_TYPE (type);
4713 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4714 && TYPE_MAX_VALUE (domain)
4715 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4716 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4717 HOST_WIDE_INT minelt = 0;
4718 HOST_WIDE_INT maxelt = 0;
4720 /* If we have constant bounds for the range of the type, get them. */
4723 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4724 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4727 /* If the constructor has fewer elements than the array,
4728 clear the whole array first. Similarly if this is
4729 a static constructor of a non-BLKmode object. */
4730 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4734 HOST_WIDE_INT count = 0, zero_count = 0;
4735 need_to_clear = ! const_bounds_p;
4737 /* This loop is a more accurate version of the loop in
4738 mostly_zeros_p (it handles RANGE_EXPR in an index).
4739 It is also needed to check for missing elements. */
4740 for (elt = CONSTRUCTOR_ELTS (exp);
4741 elt != NULL_TREE && ! need_to_clear;
4742 elt = TREE_CHAIN (elt))
4744 tree index = TREE_PURPOSE (elt);
4745 HOST_WIDE_INT this_node_count;
4747 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4749 tree lo_index = TREE_OPERAND (index, 0);
4750 tree hi_index = TREE_OPERAND (index, 1);
4752 if (! host_integerp (lo_index, 1)
4753 || ! host_integerp (hi_index, 1))
4759 this_node_count = (tree_low_cst (hi_index, 1)
4760 - tree_low_cst (lo_index, 1) + 1);
4763 this_node_count = 1;
4765 count += this_node_count;
4766 if (mostly_zeros_p (TREE_VALUE (elt)))
4767 zero_count += this_node_count;
4770 /* Clear the entire array first if there are any missing elements,
4771 or if the incidence of zero elements is >= 75%. */
4773 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4777 if (need_to_clear && size > 0)
4780 clear_storage (target, GEN_INT (size), align);
4783 else if (REG_P (target))
4784 /* Inform later passes that the old value is dead. */
4785 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4787 /* Store each element of the constructor into
4788 the corresponding element of TARGET, determined
4789 by counting the elements. */
4790 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4792 elt = TREE_CHAIN (elt), i++)
4794 register enum machine_mode mode;
4795 HOST_WIDE_INT bitsize;
4796 HOST_WIDE_INT bitpos;
4798 tree value = TREE_VALUE (elt);
4799 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4800 tree index = TREE_PURPOSE (elt);
4801 rtx xtarget = target;
4803 if (cleared && is_zeros_p (value))
4806 unsignedp = TREE_UNSIGNED (elttype);
4807 mode = TYPE_MODE (elttype);
4808 if (mode == BLKmode)
4809 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4810 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4813 bitsize = GET_MODE_BITSIZE (mode);
4815 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4817 tree lo_index = TREE_OPERAND (index, 0);
4818 tree hi_index = TREE_OPERAND (index, 1);
4819 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4820 struct nesting *loop;
4821 HOST_WIDE_INT lo, hi, count;
4824 /* If the range is constant and "small", unroll the loop. */
4826 && host_integerp (lo_index, 0)
4827 && host_integerp (hi_index, 0)
4828 && (lo = tree_low_cst (lo_index, 0),
4829 hi = tree_low_cst (hi_index, 0),
4830 count = hi - lo + 1,
4831 (GET_CODE (target) != MEM
4833 || (host_integerp (TYPE_SIZE (elttype), 1)
4834 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4837 lo -= minelt; hi -= minelt;
4838 for (; lo <= hi; lo++)
4840 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4841 store_constructor_field
4842 (target, bitsize, bitpos, mode, value, type, align,
4844 TYPE_NONALIASED_COMPONENT (type)
4845 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
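/* Editorial sketch: the "constant and small" test above trades code size
   for loop overhead.  Its size component might be summarized as below;
   the bit budget and the register-target exemption are assumptions about
   the real condition, not a quotation of it.  */
#if 0
static int
range_small_enough_to_unroll (long lo, long hi, long elt_size_bits,
                              long budget_bits)
{
  long count = hi - lo + 1;
  return count > 0 && elt_size_bits * count <= budget_bits;
}
#endif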
4850 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4851 loop_top = gen_label_rtx ();
4852 loop_end = gen_label_rtx ();
4854 unsignedp = TREE_UNSIGNED (domain);
4856 index = build_decl (VAR_DECL, NULL_TREE, domain);
4859 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4861 SET_DECL_RTL (index, index_r);
4862 if (TREE_CODE (value) == SAVE_EXPR
4863 && SAVE_EXPR_RTL (value) == 0)
4865 /* Make sure value gets expanded once before the loop. */
4867 expand_expr (value, const0_rtx, VOIDmode, 0);
4870 store_expr (lo_index, index_r, 0);
4871 loop = expand_start_loop (0);
4873 /* Assign value to element index. */
4875 position = convert (ssizetype,
4876 fold (build (MINUS_EXPR, TREE_TYPE (index),
4877 index, TYPE_MIN_VALUE (domain))));
4878 position = size_binop (MULT_EXPR, position,
4880 TYPE_SIZE_UNIT (elttype)));
4882 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4883 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4884 xtarget = change_address (target, mode, addr);
4885 if (TREE_CODE (value) == CONSTRUCTOR)
4886 store_constructor (value, xtarget, align, cleared,
4887 bitsize / BITS_PER_UNIT);
4889 store_expr (value, xtarget, 0);
4891 expand_exit_loop_if_false (loop,
4892 build (LT_EXPR, integer_type_node,
4895 expand_increment (build (PREINCREMENT_EXPR,
4897 index, integer_one_node), 0, 0);
4899 emit_label (loop_end);
4902 else if ((index != 0 && ! host_integerp (index, 0))
4903 || ! host_integerp (TYPE_SIZE (elttype), 1))
4909 index = ssize_int (1);
4912 index = convert (ssizetype,
4913 fold (build (MINUS_EXPR, index,
4914 TYPE_MIN_VALUE (domain))));
4916 position = size_binop (MULT_EXPR, index,
4918 TYPE_SIZE_UNIT (elttype)));
4919 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4920 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4921 xtarget = change_address (target, mode, addr);
4922 store_expr (value, xtarget, 0);
4927 bitpos = ((tree_low_cst (index, 0) - minelt)
4928 * tree_low_cst (TYPE_SIZE (elttype), 1));
4930 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4932 store_constructor_field (target, bitsize, bitpos, mode, value,
4933 type, align, cleared,
4934 TYPE_NONALIASED_COMPONENT (type)
4935 && GET_CODE (target) == MEM
4936 ? MEM_ALIAS_SET (target) :
4937 get_alias_set (elttype));
4943 /* Set constructor assignments. */
4944 else if (TREE_CODE (type) == SET_TYPE)
4946 tree elt = CONSTRUCTOR_ELTS (exp);
4947 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4948 tree domain = TYPE_DOMAIN (type);
4949 tree domain_min, domain_max, bitlength;
4951 /* The default implementation strategy is to extract the constant
4952 parts of the constructor, use that to initialize the target,
4953 and then "or" in whatever non-constant ranges we need in addition.
4955 If a large set is all zero or all ones, it is
4956 probably better to set it using memset (if available) or bzero.
4957 Also, if a large set has just a single range, it may also be
4958 better to first clear the set (using bzero/memset)
4959 and then set the bits we want. */
4961 /* Check for all zeros. */
4962 if (elt == NULL_TREE && size > 0)
4965 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4969 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4970 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4971 bitlength = size_binop (PLUS_EXPR,
4972 size_diffop (domain_max, domain_min),
4975 nbits = tree_low_cst (bitlength, 1);
4977 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4978 are "complicated" (more than one range), initialize (the
4979 constant parts) by copying from a constant. */
4980 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4981 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4983 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4984 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4985 char *bit_buffer = (char *) alloca (nbits);
4986 HOST_WIDE_INT word = 0;
4987 unsigned int bit_pos = 0;
4988 unsigned int ibit = 0;
4989 unsigned int offset = 0; /* In bytes from beginning of set. */
4991 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4994 if (bit_buffer[ibit])
4996 if (BYTES_BIG_ENDIAN)
4997 word |= (1 << (set_word_size - 1 - bit_pos));
4999 word |= 1 << bit_pos;
5003 if (bit_pos >= set_word_size || ibit == nbits)
5005 if (word != 0 || ! cleared)
5007 rtx datum = GEN_INT (word);
5010 /* The assumption here is that it is safe to use
5011 XEXP if the set is multi-word, but not if
5012 it's single-word. */
5013 if (GET_CODE (target) == MEM)
5014 to_rtx = adjust_address (target, mode, offset);
5015 else if (offset == 0)
5019 emit_move_insn (to_rtx, datum);
5026 offset += set_word_size / BITS_PER_UNIT;
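/* Editorial sketch of the word-packing loop above as a standalone routine:
   flat bits are ORed into words, honoring bit order within the word, and
   each completed word is flushed.  Assumes set_word_size <= 32 and 8-bit
   units; flush_word is a hypothetical sink standing in for emit_move_insn.  */
#if 0
#include <stdint.h>

static void
pack_set_bits (const char *bit_buffer, unsigned nbits, unsigned set_word_size,
               int bytes_big_endian,
               void (*flush_word) (uint32_t word, unsigned byte_offset))
{
  uint32_t word = 0;
  unsigned bit_pos = 0, ibit = 0, offset = 0;

  while (ibit < nbits)
    {
      if (bit_buffer[ibit])
        word |= bytes_big_endian
                ? (uint32_t) 1 << (set_word_size - 1 - bit_pos)
                : (uint32_t) 1 << bit_pos;
      bit_pos++, ibit++;

      if (bit_pos >= set_word_size || ibit == nbits)
        {
          flush_word (word, offset);    /* emit_move_insn analogue */
          word = 0;
          bit_pos = 0;
          offset += set_word_size / 8;  /* BITS_PER_UNIT == 8 assumed */
        }
    }
}
#endif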
5031 /* Don't bother clearing storage if the set is all ones. */
5032 if (TREE_CHAIN (elt) != NULL_TREE
5033 || (TREE_PURPOSE (elt) == NULL_TREE
5035 : ( ! host_integerp (TREE_VALUE (elt), 0)
5036 || ! host_integerp (TREE_PURPOSE (elt), 0)
5037 || (tree_low_cst (TREE_VALUE (elt), 0)
5038 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5039 != (HOST_WIDE_INT) nbits))))
5040 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5042 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5044 /* Start of range of element or NULL. */
5045 tree startbit = TREE_PURPOSE (elt);
5046 /* End of range of element, or element value. */
5047 tree endbit = TREE_VALUE (elt);
5048 #ifdef TARGET_MEM_FUNCTIONS
5049 HOST_WIDE_INT startb, endb;
5051 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5053 bitlength_rtx = expand_expr (bitlength,
5054 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5056 /* Handle non-range tuple element like [ expr ]. */
5057 if (startbit == NULL_TREE)
5059 startbit = save_expr (endbit);
5063 startbit = convert (sizetype, startbit);
5064 endbit = convert (sizetype, endbit);
5065 if (! integer_zerop (domain_min))
5067 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5068 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5070 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5071 EXPAND_CONST_ADDRESS);
5072 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5073 EXPAND_CONST_ADDRESS);
5079 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5082 emit_move_insn (targetx, target);
5085 else if (GET_CODE (target) == MEM)
5090 #ifdef TARGET_MEM_FUNCTIONS
5091 /* Optimization: If startbit and endbit are
5092 constants divisible by BITS_PER_UNIT,
5093 call memset instead. */
5094 if (TREE_CODE (startbit) == INTEGER_CST
5095 && TREE_CODE (endbit) == INTEGER_CST
5096 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5097 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5099 emit_library_call (memset_libfunc, LCT_NORMAL,
5101 plus_constant (XEXP (targetx, 0),
5102 startb / BITS_PER_UNIT),
5104 constm1_rtx, TYPE_MODE (integer_type_node),
5105 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5106 TYPE_MODE (sizetype));
5110 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5111 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5112 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5113 startbit_rtx, TYPE_MODE (sizetype),
5114 endbit_rtx, TYPE_MODE (sizetype));
5117 emit_move_insn (target, targetx);
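/* Editorial sketch: the byte-aligned memset fast path above, expressed
   directly.  startb and endb are bit indices with endb exclusive; when
   both are multiples of 8 the range is a whole number of bytes, and
   filling with 0xff matches the constm1_rtx argument.  */
#if 0
#include <string.h>

static void
set_bit_range_fast (unsigned char *base, long startb, long endb)
{
  /* Caller guarantees startb % 8 == 0 and endb % 8 == 0.  */
  memset (base + startb / 8, 0xff, (size_t) ((endb - startb) / 8));
}
#endif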
5125 /* Store the value of EXP (an expression tree)
5126 into a subfield of TARGET which has mode MODE and occupies
5127 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5128 If MODE is VOIDmode, it means that we are storing into a bit-field.
5130 If VALUE_MODE is VOIDmode, return nothing in particular.
5131 UNSIGNEDP is not used in this case.
5133 Otherwise, return an rtx for the value stored. This rtx
5134 has mode VALUE_MODE if that is convenient to do.
5135 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5137 ALIGN is the alignment that TARGET is known to have.
5138 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5140 ALIAS_SET is the alias set for the destination. This value will
5141 (in general) be different from that for TARGET, since TARGET is a
5142 reference to the containing structure. */
5145 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5146 unsignedp, align, total_size, alias_set)
5148 HOST_WIDE_INT bitsize;
5149 HOST_WIDE_INT bitpos;
5150 enum machine_mode mode;
5152 enum machine_mode value_mode;
5155 HOST_WIDE_INT total_size;
5158 HOST_WIDE_INT width_mask = 0;
5160 if (TREE_CODE (exp) == ERROR_MARK)
5163 /* If we have nothing to store, do nothing unless the expression has side-effects. */
5166 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5168 if (bitsize < HOST_BITS_PER_WIDE_INT)
5169 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5171 /* If we are storing into an unaligned field of an aligned union that is
5172 in a register, we may have the mode of TARGET being an integer mode but
5173 MODE == BLKmode. In that case, get an aligned object whose size and
5174 alignment are the same as TARGET and store TARGET into it (we can avoid
5175 the store if the field being stored is the entire width of TARGET). Then
5176 call ourselves recursively to store the field into a BLKmode version of
5177 that object. Finally, load from the object into TARGET. This is not
5178 very efficient in general, but should only be slightly more expensive
5179 than the otherwise-required unaligned accesses. Perhaps this can be
5180 cleaned up later. */
5183 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5187 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5190 rtx blk_object = copy_rtx (object);
5192 PUT_MODE (blk_object, BLKmode);
5194 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5195 emit_move_insn (object, target);
5197 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5198 align, total_size, alias_set);
5200 /* Even though we aren't returning target, we need to
5201 give it the updated value. */
5202 emit_move_insn (target, object);
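/* Editorial sketch, hypothetical offsets: the register round-trip above is
   "spill, poke, reload" -- copy the register to an aligned temporary, store
   the field there in BLKmode, then load the whole value back.  */
#if 0
#include <stdint.h>
#include <string.h>

static uint64_t
store_field_via_memory (uint64_t reg_value, const void *field,
                        size_t field_off, size_t field_len)
{
  unsigned char object[sizeof reg_value];   /* the aligned stack temporary */

  /* Caller guarantees field_off + field_len <= sizeof object.  */
  memcpy (object, &reg_value, sizeof object);     /* emit_move_insn (object, target) */
  memcpy (object + field_off, field, field_len);  /* recursive BLKmode store_field */
  memcpy (&reg_value, object, sizeof object);     /* emit_move_insn (target, object) */
  return reg_value;
}
#endif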
5207 if (GET_CODE (target) == CONCAT)
5209 /* We're storing into a struct containing a single __complex. */
5213 return store_expr (exp, target, 0);
5216 /* If the structure is in a register or if the component
5217 is a bit field, we cannot use addressing to access it.
5218 Use bit-field techniques or SUBREG to store in it. */
5220 if (mode == VOIDmode
5221 || (mode != BLKmode && ! direct_store[(int) mode]
5222 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5223 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5224 || GET_CODE (target) == REG
5225 || GET_CODE (target) == SUBREG
5226 /* If the field isn't aligned enough to store as an ordinary memref,
5227 store it as a bit field. */
5228 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5229 && (align < GET_MODE_ALIGNMENT (mode)
5230 || bitpos % GET_MODE_ALIGNMENT (mode)))
5231 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5232 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5233 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5234 /* If the RHS and field are a constant size and the size of the
5235 RHS isn't the same size as the bitfield, we must use bitfield
5238 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5239 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5241 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5243 /* If BITSIZE is narrower than the size of the type of EXP
5244 we will be narrowing TEMP. Normally, what's wanted are the
5245 low-order bits. However, if EXP's type is a record and this is
5246 a big-endian machine, we want the upper BITSIZE bits. */
5247 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5248 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5249 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5250 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5251 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5255 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5257 if (mode != VOIDmode && mode != BLKmode
5258 && mode != TYPE_MODE (TREE_TYPE (exp)))
5259 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5261 /* If the modes of TARGET and TEMP are both BLKmode, both
5262 must be in memory and BITPOS must be aligned on a byte
5263 boundary. If so, we simply do a block copy. */
5264 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5266 unsigned int exp_align = expr_align (exp);
5268 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5269 || bitpos % BITS_PER_UNIT != 0)
5272 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5274 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5275 align = MIN (exp_align, align);
5277 /* Find an alignment that is consistent with the bit position. */
5278 while ((bitpos % align) != 0)
5279 align >>= 1;
5281 emit_block_move (target, temp,
5282 bitsize == -1 ? expr_size (exp)
5283 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5287 return value_mode == VOIDmode ? const0_rtx : target;
5290 /* Store the value in the bitfield. */
5291 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5292 if (value_mode != VOIDmode)
5294 /* The caller wants an rtx for the value. */
5295 /* If possible, avoid refetching from the bitfield itself. */
5297 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5300 enum machine_mode tmode;
5303 return expand_and (temp,
5307 GET_MODE (temp) == VOIDmode
5309 : GET_MODE (temp))), NULL_RTX);
5310 tmode = GET_MODE (temp);
5311 if (tmode == VOIDmode)
5313 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5314 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5315 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
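/* Editorial sketch: the shift pair above is the classic sign extension of
   an N-bit field held in the low bits of a word.  A 32-bit host analogue,
   assuming arithmetic right shift of signed values as GCC targets provide: */
#if 0
#include <stdint.h>

static int32_t
sign_extend_field (int32_t temp, unsigned bitsize)
{
  int count = 32 - (int) bitsize;          /* GET_MODE_BITSIZE (tmode) - bitsize */
  uint32_t up = (uint32_t) temp << count;  /* LSHIFT_EXPR: discard high bits */
  return (int32_t) up >> count;            /* RSHIFT_EXPR: arithmetic shift */
}
#endif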
5317 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5318 NULL_RTX, value_mode, 0, align,
5325 rtx addr = XEXP (target, 0);
5328 /* If a value is wanted, it must be the lhs;
5329 so make the address stable for multiple use. */
5331 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5332 && ! CONSTANT_ADDRESS_P (addr)
5333 /* A frame-pointer reference is already stable. */
5334 && ! (GET_CODE (addr) == PLUS
5335 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5336 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5337 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5338 target = replace_equiv_address (target, copy_to_reg (addr));
5340 /* Now build a reference to just the desired component. */
5342 to_rtx = copy_rtx (adjust_address (target, mode,
5343 bitpos / BITS_PER_UNIT));
5345 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5346 /* If the address of the structure varies, then it might be on
5347 the stack. And, stack slots may be shared across scopes.
5348 So, two different structures, of different types, can end up
5349 at the same location. We will give the structures alias set
5350 zero; here we must be careful not to give non-zero alias sets to their pieces. */
5352 set_mem_alias_set (to_rtx,
5353 rtx_varies_p (addr, /*for_alias=*/0)
5356 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5360 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5361 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5362 codes and find the ultimate containing object, which we return.
5364 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5365 bit position, and *PUNSIGNEDP to the signedness of the field.
5366 If the position of the field is variable, we store a tree
5367 giving the variable offset (in units) in *POFFSET.
5368 This offset is in addition to the bit position.
5369 If the position is not variable, we store 0 in *POFFSET.
5370 We set *PALIGNMENT to the alignment of the address that will be
5371 computed. This is the alignment of the thing we return if *POFFSET
5372 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5374 If any of the extraction expressions is volatile,
5375 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5377 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5378 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5381 If the field describes a variable-sized object, *PMODE is set to
5382 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5383 this case, but the address of the object can be found. */
5386 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5387 punsignedp, pvolatilep, palignment)
5389 HOST_WIDE_INT *pbitsize;
5390 HOST_WIDE_INT *pbitpos;
5392 enum machine_mode *pmode;
5395 unsigned int *palignment;
5398 enum machine_mode mode = VOIDmode;
5399 tree offset = size_zero_node;
5400 tree bit_offset = bitsize_zero_node;
5401 unsigned int alignment = BIGGEST_ALIGNMENT;
5404 /* First get the mode, signedness, and size. We do this from just the
5405 outermost expression. */
5406 if (TREE_CODE (exp) == COMPONENT_REF)
5408 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5409 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5410 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5412 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5414 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5416 size_tree = TREE_OPERAND (exp, 1);
5417 *punsignedp = TREE_UNSIGNED (exp);
5421 mode = TYPE_MODE (TREE_TYPE (exp));
5422 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5424 if (mode == BLKmode)
5425 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5427 *pbitsize = GET_MODE_BITSIZE (mode);
5432 if (! host_integerp (size_tree, 1))
5433 mode = BLKmode, *pbitsize = -1;
5435 *pbitsize = tree_low_cst (size_tree, 1);
5438 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5439 and find the ultimate containing object. */
5442 if (TREE_CODE (exp) == BIT_FIELD_REF)
5443 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5444 else if (TREE_CODE (exp) == COMPONENT_REF)
5446 tree field = TREE_OPERAND (exp, 1);
5447 tree this_offset = DECL_FIELD_OFFSET (field);
5449 /* If this field hasn't been filled in yet, don't go
5450 past it. This should only happen when folding expressions
5451 made during type construction. */
5452 if (this_offset == 0)
5454 else if (! TREE_CONSTANT (this_offset)
5455 && contains_placeholder_p (this_offset))
5456 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5458 offset = size_binop (PLUS_EXPR, offset, this_offset);
5459 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5460 DECL_FIELD_BIT_OFFSET (field));
5462 if (! host_integerp (offset, 0))
5463 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5466 else if (TREE_CODE (exp) == ARRAY_REF
5467 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5469 tree index = TREE_OPERAND (exp, 1);
5470 tree array = TREE_OPERAND (exp, 0);
5471 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5472 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5473 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5475 /* We assume all arrays have sizes that are a multiple of a byte.
5476 First subtract the lower bound, if any, in the type of the
5477 index, then convert to sizetype and multiply by the size of the array element. */
5479 if (low_bound != 0 && ! integer_zerop (low_bound))
5480 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5483 /* If the index has a self-referential type, pass it to a
5484 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5485 component to one. */
5486 if (! TREE_CONSTANT (index)
5487 && contains_placeholder_p (index))
5488 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5489 if (! TREE_CONSTANT (unit_size)
5490 && contains_placeholder_p (unit_size))
5491 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5493 offset = size_binop (PLUS_EXPR, offset,
5494 size_binop (MULT_EXPR,
5495 convert (sizetype, index),
5499 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5500 && ! ((TREE_CODE (exp) == NOP_EXPR
5501 || TREE_CODE (exp) == CONVERT_EXPR)
5502 && (TYPE_MODE (TREE_TYPE (exp))
5503 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5506 /* If any reference in the chain is volatile, the effect is volatile. */
5507 if (TREE_THIS_VOLATILE (exp))
5510 /* If the offset is non-constant already, then we can't assume any
5511 alignment more than the alignment here. */
5512 if (! TREE_CONSTANT (offset))
5513 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5515 exp = TREE_OPERAND (exp, 0);
5519 alignment = MIN (alignment, DECL_ALIGN (exp));
5520 else if (TREE_TYPE (exp) != 0)
5521 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5523 /* If OFFSET is constant, see if we can return the whole thing as a
5524 constant bit position. Otherwise, split it up. */
5525 if (host_integerp (offset, 0)
5526 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5528 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5529 && host_integerp (tem, 0))
5530 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5532 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5535 *palignment = alignment;
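/* Editorial worked example: for a COMPONENT_REF of a bit-field that starts
   3 bits into a field at constant byte offset 4, the loop above accumulates
   offset = 4 and bit_offset = 3, and the constant case folds them into
   *PBITPOS = 4 * BITS_PER_UNIT + 3 = 35 (with 8-bit units) and *POFFSET = 0.
   Had the byte offset been variable, it would remain in *POFFSET as a tree
   and only the residual 3 bits would reach *PBITPOS.  */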
5539 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5541 static enum memory_use_mode
5542 get_memory_usage_from_modifier (modifier)
5543 enum expand_modifier modifier;
5549 return MEMORY_USE_RO;
5551 case EXPAND_MEMORY_USE_WO:
5552 return MEMORY_USE_WO;
5554 case EXPAND_MEMORY_USE_RW:
5555 return MEMORY_USE_RW;
5557 case EXPAND_MEMORY_USE_DONT:
5558 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5559 MEMORY_USE_DONT, because they are modifiers to a call of
5560 expand_expr in the ADDR_EXPR case of expand_expr. */
5561 case EXPAND_CONST_ADDRESS:
5562 case EXPAND_INITIALIZER:
5563 return MEMORY_USE_DONT;
5564 case EXPAND_MEMORY_USE_BAD:
5570 /* Given an rtx VALUE that may contain additions and multiplications, return
5571 an equivalent value that just refers to a register, memory, or constant.
5572 This is done by generating instructions to perform the arithmetic and
5573 returning a pseudo-register containing the value.
5575 The returned value may be a REG, SUBREG, MEM or constant. */
5578 force_operand (value, target)
5581 register optab binoptab = 0;
5582 /* Use a temporary to force order of execution of calls to `force_operand'. */
5586 /* Use subtarget as the target for operand 0 of a binary operation. */
5587 register rtx subtarget = get_subtarget (target);
5589 /* Check for a PIC address load. */
5591 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5592 && XEXP (value, 0) == pic_offset_table_rtx
5593 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5594 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5595 || GET_CODE (XEXP (value, 1)) == CONST))
5598 subtarget = gen_reg_rtx (GET_MODE (value));
5599 emit_move_insn (subtarget, value);
5603 if (GET_CODE (value) == PLUS)
5604 binoptab = add_optab;
5605 else if (GET_CODE (value) == MINUS)
5606 binoptab = sub_optab;
5607 else if (GET_CODE (value) == MULT)
5609 op2 = XEXP (value, 1);
5610 if (!CONSTANT_P (op2)
5611 && !(GET_CODE (op2) == REG && op2 != subtarget))
5613 tmp = force_operand (XEXP (value, 0), subtarget);
5614 return expand_mult (GET_MODE (value), tmp,
5615 force_operand (op2, NULL_RTX),
5621 op2 = XEXP (value, 1);
5622 if (!CONSTANT_P (op2)
5623 && !(GET_CODE (op2) == REG && op2 != subtarget))
5625 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5627 binoptab = add_optab;
5628 op2 = negate_rtx (GET_MODE (value), op2);
5631 /* Check for an addition with OP2 a constant integer and our first
5632 operand a PLUS of a virtual register and something else. In that
5633 case, we want to emit the sum of the virtual register and the
5634 constant first and then add the other value. This allows virtual
5635 register instantiation to simply modify the constant rather than
5636 creating another one around this addition. */
5637 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5638 && GET_CODE (XEXP (value, 0)) == PLUS
5639 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5640 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5641 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5643 rtx temp = expand_binop (GET_MODE (value), binoptab,
5644 XEXP (XEXP (value, 0), 0), op2,
5645 subtarget, 0, OPTAB_LIB_WIDEN);
5646 return expand_binop (GET_MODE (value), binoptab, temp,
5647 force_operand (XEXP (XEXP (value, 0), 1), 0),
5648 target, 0, OPTAB_LIB_WIDEN);
5651 tmp = force_operand (XEXP (value, 0), subtarget);
5652 return expand_binop (GET_MODE (value), binoptab, tmp,
5653 force_operand (op2, NULL_RTX),
5654 target, 0, OPTAB_LIB_WIDEN);
5655 /* We give UNSIGNEDP = 0 to expand_binop
5656 because the only operations we are expanding here are signed ones. */
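/* Editorial sketch: the virtual-register special case above keeps the
   constant adjacent to the virtual register so instantiation can fold it:

     (vreg + x) + 8   is emitted as   t = vreg + 8;  result = t + x
     rather than                      t = vreg + x;  result = t + 8

   When vreg is later replaced by, say, frame_pointer + C, the first form
   merges the 8 into that offset instead of keeping a second addition.  */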
5661 /* Subroutine of expand_expr: return nonzero iff there is no way that
5662 EXP can reference X, which is being modified. TOP_P is nonzero if this
5663 call is going to be used to determine whether we need a temporary
5664 for EXP, as opposed to a recursive call to this function.
5666 It is always safe for this routine to return zero since it merely
5667 searches for optimization opportunities. */
5670 safe_from_p (x, exp, top_p)
5677 static tree save_expr_list;
5680 /* If EXP has varying size, we MUST use a target since we currently
5681 have no way of allocating temporaries of variable size
5682 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5683 So we assume here that something at a higher level has prevented a
5684 clash. This is somewhat bogus, but the best we can do. Only
5685 do this when X is BLKmode and when we are at the top level. */
5686 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5687 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5688 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5689 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5690 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5692 && GET_MODE (x) == BLKmode)
5693 /* If X is in the outgoing argument area, it is always safe. */
5694 || (GET_CODE (x) == MEM
5695 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5696 || (GET_CODE (XEXP (x, 0)) == PLUS
5697 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5700 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5701 find the underlying pseudo. */
5702 if (GET_CODE (x) == SUBREG)
5705 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5709 /* A SAVE_EXPR might appear many times in the expression passed to the
5710 top-level safe_from_p call, and if it has a complex subexpression,
5711 examining it multiple times could result in a combinatorial explosion.
5712 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5713 with optimization took about 28 minutes to compile -- even though it was
5714 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5715 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5716 we have processed. Note that the only test of top_p was above. */
5725 rtn = safe_from_p (x, exp, 0);
5727 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5728 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
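/* Editorial sketch of the memoization above: marking shared nodes keeps a
   recursive walk linear in the size of the DAG instead of exponential in
   its depth.  The structure and names here are hypothetical.  */
#if 0
struct node { int visited; int nkids; struct node *kid[2]; };

static int
walk_once (struct node *n)
{
  int i;

  if (n == 0 || n->visited)     /* TREE_PRIVATE analogue: skip re-scans */
    return 1;
  n->visited = 1;               /* cleared afterward by the caller's list walk */
  for (i = 0; i < n->nkids; i++)
    if (! walk_once (n->kid[i]))
      return 0;
  return 1;
}
#endif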
5733 /* Now look at our tree code and possibly recurse. */
5734 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5737 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5744 if (TREE_CODE (exp) == TREE_LIST)
5745 return ((TREE_VALUE (exp) == 0
5746 || safe_from_p (x, TREE_VALUE (exp), 0))
5747 && (TREE_CHAIN (exp) == 0
5748 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5749 else if (TREE_CODE (exp) == ERROR_MARK)
5750 return 1; /* An already-visited SAVE_EXPR? */
5755 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5759 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5760 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5764 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5765 the expression. If it is set, we conflict iff we are that rtx or
5766 both are in memory. Otherwise, we check all operands of the
5767 expression recursively. */
5769 switch (TREE_CODE (exp))
5772 return (staticp (TREE_OPERAND (exp, 0))
5773 || TREE_STATIC (exp)
5774 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5777 if (GET_CODE (x) == MEM
5778 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5779 get_alias_set (exp)))
5784 /* Assume that the call will clobber all hard registers and
5786 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5787 || GET_CODE (x) == MEM)
5792 /* If a sequence exists, we would have to scan every instruction
5793 in the sequence to see if it was safe. This is probably not
5795 if (RTL_EXPR_SEQUENCE (exp))
5798 exp_rtl = RTL_EXPR_RTL (exp);
5801 case WITH_CLEANUP_EXPR:
5802 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5805 case CLEANUP_POINT_EXPR:
5806 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5809 exp_rtl = SAVE_EXPR_RTL (exp);
5813 /* If we've already scanned this, don't do it again. Otherwise,
5814 show we've scanned it and record for clearing the flag if we're going on. */
5816 if (TREE_PRIVATE (exp))
5819 TREE_PRIVATE (exp) = 1;
5820 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5822 TREE_PRIVATE (exp) = 0;
5826 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5830 /* The only operand we look at is operand 1. The rest aren't
5831 part of the expression. */
5832 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5834 case METHOD_CALL_EXPR:
5835 /* This takes an rtx argument, but shouldn't appear here. */
5842 /* If we have an rtx, we do not need to scan our operands. */
5846 nops = first_rtl_op (TREE_CODE (exp));
5847 for (i = 0; i < nops; i++)
5848 if (TREE_OPERAND (exp, i) != 0
5849 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5852 /* If this is a language-specific tree code, it may require
5853 special handling. */
5854 if ((unsigned int) TREE_CODE (exp)
5855 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5857 && !(*lang_safe_from_p) (x, exp))
5861 /* If we have an rtl, find any enclosed object. Then see if we conflict with it. */
5865 if (GET_CODE (exp_rtl) == SUBREG)
5867 exp_rtl = SUBREG_REG (exp_rtl);
5868 if (GET_CODE (exp_rtl) == REG
5869 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5873 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5874 are memory and they conflict. */
5875 return ! (rtx_equal_p (x, exp_rtl)
5876 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5877 && true_dependence (exp_rtl, GET_MODE (x), x,
5878 rtx_addr_varies_p)));
5881 /* If we reach here, it is safe. */
5885 /* Subroutine of expand_expr: return rtx if EXP is a
5886 variable or parameter; else return 0. */
5893 switch (TREE_CODE (exp))
5897 return DECL_RTL (exp);
5903 #ifdef MAX_INTEGER_COMPUTATION_MODE
5906 check_max_integer_computation_mode (exp)
5909 enum tree_code code;
5910 enum machine_mode mode;
5912 /* Strip any NOPs that don't change the mode. */
5914 code = TREE_CODE (exp);
5916 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5917 if (code == NOP_EXPR
5918 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5921 /* First check the type of the overall operation. We need only look at
5922 unary, binary and relational operations. */
5923 if (TREE_CODE_CLASS (code) == '1'
5924 || TREE_CODE_CLASS (code) == '2'
5925 || TREE_CODE_CLASS (code) == '<')
5927 mode = TYPE_MODE (TREE_TYPE (exp));
5928 if (GET_MODE_CLASS (mode) == MODE_INT
5929 && mode > MAX_INTEGER_COMPUTATION_MODE)
5930 internal_error ("unsupported wide integer operation");
5933 /* Check operand of a unary op. */
5934 if (TREE_CODE_CLASS (code) == '1')
5936 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5937 if (GET_MODE_CLASS (mode) == MODE_INT
5938 && mode > MAX_INTEGER_COMPUTATION_MODE)
5939 internal_error ("unsupported wide integer operation");
5942 /* Check operands of a binary/comparison op. */
5943 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5945 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5946 if (GET_MODE_CLASS (mode) == MODE_INT
5947 && mode > MAX_INTEGER_COMPUTATION_MODE)
5948 internal_error ("unsupported wide integer operation");
5950 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5951 if (GET_MODE_CLASS (mode) == MODE_INT
5952 && mode > MAX_INTEGER_COMPUTATION_MODE)
5953 internal_error ("unsupported wide integer operation");
5958 /* Return an object on the placeholder list that matches EXP, a
5959 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5960 PLACEHOLDER_EXPR or a pointer type to it. For further information,
5961 see tree.def. If no such object is found, abort. If PLIST is nonzero,
5962 it is a location into which a pointer into the placeholder list at
5963 which the object is found is placed. */
5966 find_placeholder (exp, plist)
5970 tree type = TREE_TYPE (exp);
5971 tree placeholder_expr;
5973 for (placeholder_expr = placeholder_list; placeholder_expr != 0;
5974 placeholder_expr = TREE_CHAIN (placeholder_expr))
5976 tree need_type = TYPE_MAIN_VARIANT (type);
5979 /* Find the outermost reference that is of the type we want. If none,
5980 see if any object has a type that is a pointer to the type we want. */
5982 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5983 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5984 || TREE_CODE (elt) == COND_EXPR)
5985 ? TREE_OPERAND (elt, 1)
5986 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5987 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5988 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5989 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5990 ? TREE_OPERAND (elt, 0) : 0))
5991 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5994 *plist = placeholder_expr;
5998 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6000 = ((TREE_CODE (elt) == COMPOUND_EXPR
6001 || TREE_CODE (elt) == COND_EXPR)
6002 ? TREE_OPERAND (elt, 1)
6003 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6004 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6005 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6006 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6007 ? TREE_OPERAND (elt, 0) : 0))
6008 if (POINTER_TYPE_P (TREE_TYPE (elt))
6009 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6013 *plist = placeholder_expr;
6014 return build1 (INDIRECT_REF, need_type, elt);
6021 /* expand_expr: generate code for computing expression EXP.
6022 An rtx for the computed value is returned. The value is never null.
6023 In the case of a void EXP, const0_rtx is returned.
6025 The value may be stored in TARGET if TARGET is nonzero.
6026 TARGET is just a suggestion; callers must assume that
6027 the rtx returned may not be the same as TARGET.
6029 If TARGET is CONST0_RTX, it means that the value will be ignored.
6031 If TMODE is not VOIDmode, it suggests generating the
6032 result in mode TMODE. But this is done only when convenient.
6033 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6034 TMODE is just a suggestion; callers must assume that
6035 the rtx returned may not have mode TMODE.
6037 Note that TARGET may have neither TMODE nor MODE. In that case, it
6038 probably will not be used.
6040 If MODIFIER is EXPAND_SUM then when EXP is an addition
6041 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6042 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6043 products as above, or REG or MEM, or constant.
6044 Ordinarily in such cases we would output mul or add instructions
6045 and then return a pseudo reg containing the sum.
6047 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6048 it also marks a label as absolutely required (it can't be dead).
6049 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6050 This is used for outputting expressions used in initializers.
6052 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6053 with a constant address even if that address is not normally legitimate.
6054 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
6057 expand_expr (exp, target, tmode, modifier)
6060 enum machine_mode tmode;
6061 enum expand_modifier modifier;
6063 register rtx op0, op1, temp;
6064 tree type = TREE_TYPE (exp);
6065 int unsignedp = TREE_UNSIGNED (type);
6066 register enum machine_mode mode;
6067 register enum tree_code code = TREE_CODE (exp);
6069 rtx subtarget, original_target;
6072 /* Used by check-memory-usage to make modifier read only. */
6073 enum expand_modifier ro_modifier;
6075 /* Handle ERROR_MARK before anybody tries to access its type. */
6076 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6078 op0 = CONST0_RTX (tmode);
6084 mode = TYPE_MODE (type);
6085 /* Use subtarget as the target for operand 0 of a binary operation. */
6086 subtarget = get_subtarget (target);
6087 original_target = target;
6088 ignore = (target == const0_rtx
6089 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6090 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6091 || code == COND_EXPR)
6092 && TREE_CODE (type) == VOID_TYPE));
6094 /* Make a read-only version of the modifier. */
6095 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6096 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6097 ro_modifier = modifier;
6099 ro_modifier = EXPAND_NORMAL;
6101 /* If we are going to ignore this result, we need only do something
6102 if there is a side-effect somewhere in the expression. If there
6103 is, short-circuit the most common cases here. Note that we must
6104 not call expand_expr with anything but const0_rtx in case this
6105 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6109 if (! TREE_SIDE_EFFECTS (exp))
6112 /* Ensure we reference a volatile object even if value is ignored, but
6113 don't do this if all we are doing is taking its address. */
6114 if (TREE_THIS_VOLATILE (exp)
6115 && TREE_CODE (exp) != FUNCTION_DECL
6116 && mode != VOIDmode && mode != BLKmode
6117 && modifier != EXPAND_CONST_ADDRESS)
6119 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6120 if (GET_CODE (temp) == MEM)
6121 temp = copy_to_reg (temp);
6125 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6126 || code == INDIRECT_REF || code == BUFFER_REF)
6127 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6128 VOIDmode, ro_modifier);
6129 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6130 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6132 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6134 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6138 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6139 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6140 /* If the second operand has no side effects, just evaluate the first. */
6142 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6143 VOIDmode, ro_modifier);
6144 else if (code == BIT_FIELD_REF)
6146 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6148 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6150 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6158 #ifdef MAX_INTEGER_COMPUTATION_MODE
6159 /* Only check stuff here if the mode we want is different from the mode
6160 of the expression; if it's the same, check_max_integer_computation_mode
6161 will handle it. Do we really need to check this stuff at all? */
6164 && GET_MODE (target) != mode
6165 && TREE_CODE (exp) != INTEGER_CST
6166 && TREE_CODE (exp) != PARM_DECL
6167 && TREE_CODE (exp) != ARRAY_REF
6168 && TREE_CODE (exp) != ARRAY_RANGE_REF
6169 && TREE_CODE (exp) != COMPONENT_REF
6170 && TREE_CODE (exp) != BIT_FIELD_REF
6171 && TREE_CODE (exp) != INDIRECT_REF
6172 && TREE_CODE (exp) != CALL_EXPR
6173 && TREE_CODE (exp) != VAR_DECL
6174 && TREE_CODE (exp) != RTL_EXPR)
6176 enum machine_mode mode = GET_MODE (target);
6178 if (GET_MODE_CLASS (mode) == MODE_INT
6179 && mode > MAX_INTEGER_COMPUTATION_MODE)
6180 internal_error ("unsupported wide integer operation");
6184 && TREE_CODE (exp) != INTEGER_CST
6185 && TREE_CODE (exp) != PARM_DECL
6186 && TREE_CODE (exp) != ARRAY_REF
6187 && TREE_CODE (exp) != ARRAY_RANGE_REF
6188 && TREE_CODE (exp) != COMPONENT_REF
6189 && TREE_CODE (exp) != BIT_FIELD_REF
6190 && TREE_CODE (exp) != INDIRECT_REF
6191 && TREE_CODE (exp) != VAR_DECL
6192 && TREE_CODE (exp) != CALL_EXPR
6193 && TREE_CODE (exp) != RTL_EXPR
6194 && GET_MODE_CLASS (tmode) == MODE_INT
6195 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6196 internal_error ("unsupported wide integer operation");
6198 check_max_integer_computation_mode (exp);
6201 /* If we will do cse, generate all results into pseudo registers
6202 since 1) that allows cse to find more things
6203 and 2) otherwise cse could produce an insn the machine cannot support. */
6206 if (! cse_not_expected && mode != BLKmode && target
6207 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6214 tree function = decl_function_context (exp);
6215 /* Handle using a label in a containing function. */
6216 if (function != current_function_decl
6217 && function != inline_function_decl && function != 0)
6219 struct function *p = find_function_data (function);
6220 p->expr->x_forced_labels
6221 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6222 p->expr->x_forced_labels);
6226 if (modifier == EXPAND_INITIALIZER)
6227 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6232 temp = gen_rtx_MEM (FUNCTION_MODE,
6233 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6234 if (function != current_function_decl
6235 && function != inline_function_decl && function != 0)
6236 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6241 if (DECL_RTL (exp) == 0)
6243 error_with_decl (exp, "prior parameter's size depends on `%s'");
6244 return CONST0_RTX (mode);
6247 /* ... fall through ... */
6250 /* If a static var's type was incomplete when the decl was written,
6251 but the type is complete now, lay out the decl now. */
6252 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6253 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6255 layout_decl (exp, 0);
6256 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6259 /* Although static-storage variables start off initialized, according to
6260 ANSI C, a memcpy could overwrite them with uninitialized values. So
6261 we check them too. This also lets us check for read-only variables
6262 accessed via a non-const declaration, in case it won't be detected
6263 any other way (e.g., in an embedded system or OS kernel without
6264 memory protection).
6266 Aggregates are not checked here; they're handled elsewhere. */
6267 if (cfun && current_function_check_memory_usage
6269 && GET_CODE (DECL_RTL (exp)) == MEM
6270 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6272 enum memory_use_mode memory_usage;
6273 memory_usage = get_memory_usage_from_modifier (modifier);
6275 in_check_memory_usage = 1;
6276 if (memory_usage != MEMORY_USE_DONT)
6277 emit_library_call (chkr_check_addr_libfunc,
6278 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6279 XEXP (DECL_RTL (exp), 0), Pmode,
6280 GEN_INT (int_size_in_bytes (type)),
6281 TYPE_MODE (sizetype),
6282 GEN_INT (memory_usage),
6283 TYPE_MODE (integer_type_node));
6284 in_check_memory_usage = 0;
6287 /* ... fall through ... */
6291 if (DECL_RTL (exp) == 0)
6294 /* Ensure the variable is marked as used even if it doesn't go through
6295 a parser. If it hasn't been used yet, write out an external
6296 definition. */
6297 if (! TREE_USED (exp))
6299 assemble_external (exp);
6300 TREE_USED (exp) = 1;
6303 /* Show we haven't gotten RTL for this yet. */
6306 /* Handle variables inherited from containing functions. */
6307 context = decl_function_context (exp);
6309 /* We treat inline_function_decl as an alias for the current function
6310 because that is the inline function whose vars, types, etc.
6311 are being merged into the current function.
6312 See expand_inline_function. */
6314 if (context != 0 && context != current_function_decl
6315 && context != inline_function_decl
6316 /* If var is static, we don't need a static chain to access it. */
6317 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6318 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6322 /* Mark as non-local and addressable. */
6323 DECL_NONLOCAL (exp) = 1;
6324 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6326 mark_addressable (exp);
6327 if (GET_CODE (DECL_RTL (exp)) != MEM)
6329 addr = XEXP (DECL_RTL (exp), 0);
6330 if (GET_CODE (addr) == MEM)
6332 = replace_equiv_address (addr,
6333 fix_lexical_addr (XEXP (addr, 0), exp));
6335 addr = fix_lexical_addr (addr, exp);
6337 temp = replace_equiv_address (DECL_RTL (exp), addr);
6340 /* This is the case of an array whose size is to be determined
6341 from its initializer, while the initializer is still being parsed.
6342 See expand_decl. */
6344 else if (GET_CODE (DECL_RTL (exp)) == MEM
6345 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6346 temp = validize_mem (DECL_RTL (exp));
6348 /* If DECL_RTL is memory, we are in the normal case and either
6349 the address is not valid or it is not a register and -fforce-addr
6350 is specified, get the address into a register. */
6352 else if (GET_CODE (DECL_RTL (exp)) == MEM
6353 && modifier != EXPAND_CONST_ADDRESS
6354 && modifier != EXPAND_SUM
6355 && modifier != EXPAND_INITIALIZER
6356 && (! memory_address_p (DECL_MODE (exp),
6357 XEXP (DECL_RTL (exp), 0))
6359 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6360 temp = replace_equiv_address (DECL_RTL (exp),
6361 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6363 /* If we got something, return it. But first, set the alignment
6364 if the address is a register. */
6367 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6368 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6373 /* If the mode of DECL_RTL does not match that of the decl, it
6374 must be a promoted value. We return a SUBREG of the wanted mode,
6375 but mark it so that we know that it was already extended. */
6377 if (GET_CODE (DECL_RTL (exp)) == REG
6378 && GET_MODE (DECL_RTL (exp)) != mode)
6380 /* Get the signedness used for this variable. Ensure we get the
6381 same mode we got when the variable was declared. */
6382 if (GET_MODE (DECL_RTL (exp))
6383 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6386 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6387 SUBREG_PROMOTED_VAR_P (temp) = 1;
6388 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6392 return DECL_RTL (exp);
6395 return immed_double_const (TREE_INT_CST_LOW (exp),
6396 TREE_INT_CST_HIGH (exp), mode);
6399 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6400 EXPAND_MEMORY_USE_BAD);
6403 /* If optimized, generate immediate CONST_DOUBLE
6404 which will be turned into memory by reload if necessary.
6406 We used to force a register so that loop.c could see it. But
6407 this does not allow gen_* patterns to perform optimizations with
6408 the constants. It also produces two insns in cases like "x = 1.0;".
6409 On most machines, floating-point constants are not permitted in
6410 many insns, so we'd end up copying it to a register in any case.
6412 Now, we do the copying in expand_binop, if appropriate. */
6413 return immed_real_const (exp);
6417 if (! TREE_CST_RTL (exp))
6418 output_constant_def (exp, 1);
6420 /* TREE_CST_RTL probably contains a constant address.
6421 On RISC machines where a constant address isn't valid,
6422 make some insns to get that address into a register. */
6423 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6424 && modifier != EXPAND_CONST_ADDRESS
6425 && modifier != EXPAND_INITIALIZER
6426 && modifier != EXPAND_SUM
6427 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6429 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6430 return replace_equiv_address (TREE_CST_RTL (exp),
6431 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6432 return TREE_CST_RTL (exp);
6434 case EXPR_WITH_FILE_LOCATION:
6437 const char *saved_input_filename = input_filename;
6438 int saved_lineno = lineno;
6439 input_filename = EXPR_WFL_FILENAME (exp);
6440 lineno = EXPR_WFL_LINENO (exp);
6441 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6442 emit_line_note (input_filename, lineno);
6443 /* Possibly avoid switching back and forth here. */
6444 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6445 input_filename = saved_input_filename;
6446 lineno = saved_lineno;
6447 return to_return;
6451 context = decl_function_context (exp);
6453 /* If this SAVE_EXPR was at global context, assume we are an
6454 initialization function and move it into our context. */
6456 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6458 /* We treat inline_function_decl as an alias for the current function
6459 because that is the inline function whose vars, types, etc.
6460 are being merged into the current function.
6461 See expand_inline_function. */
6462 if (context == current_function_decl || context == inline_function_decl)
6465 /* If this is non-local, handle it. */
6468 /* The following call just exists to abort if the context is
6469 not of a containing function. */
6470 find_function_data (context);
6472 temp = SAVE_EXPR_RTL (exp);
6473 if (temp && GET_CODE (temp) == REG)
6475 put_var_into_stack (exp);
6476 temp = SAVE_EXPR_RTL (exp);
6478 if (temp == 0 || GET_CODE (temp) != MEM)
6481 replace_equiv_address (temp,
6482 fix_lexical_addr (XEXP (temp, 0), exp));
6484 if (SAVE_EXPR_RTL (exp) == 0)
6486 if (mode == VOIDmode)
6489 temp = assign_temp (build_qualified_type (type,
6491 | TYPE_QUAL_CONST)),
6494 SAVE_EXPR_RTL (exp) = temp;
6495 if (!optimize && GET_CODE (temp) == REG)
6496 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6499 /* If the mode of TEMP does not match that of the expression, it
6500 must be a promoted value. We pass store_expr a SUBREG of the
6501 wanted mode but mark it so that we know that it was already
6502 extended. Note that `unsignedp' was modified above in
6503 this case. */
6505 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6507 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6508 SUBREG_PROMOTED_VAR_P (temp) = 1;
6509 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6512 if (temp == const0_rtx)
6513 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6514 EXPAND_MEMORY_USE_BAD);
6516 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6518 TREE_USED (exp) = 1;
6521 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6522 must be a promoted value. We return a SUBREG of the wanted mode,
6523 but mark it so that we know that it was already extended. */
6525 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6526 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6528 /* Compute the signedness and make the proper SUBREG. */
6529 promote_mode (type, mode, &unsignedp, 0);
6530 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6531 SUBREG_PROMOTED_VAR_P (temp) = 1;
6532 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6536 return SAVE_EXPR_RTL (exp);
6541 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6542 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6546 case PLACEHOLDER_EXPR:
6548 tree old_list = placeholder_list;
6549 tree placeholder_expr;
6551 exp = find_placeholder (exp, &placeholder_expr);
6552 placeholder_list = TREE_CHAIN (placeholder_expr);
6553 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6554 placeholder_list = old_list;
6558 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6561 case WITH_RECORD_EXPR:
6562 /* Put the object on the placeholder list, expand our first operand,
6563 and pop the list. */
6564 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6566 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6567 tmode, ro_modifier);
6568 placeholder_list = TREE_CHAIN (placeholder_list);
6572 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6573 expand_goto (TREE_OPERAND (exp, 0));
6575 expand_computed_goto (TREE_OPERAND (exp, 0));
6579 expand_exit_loop_if_false (NULL,
6580 invert_truthvalue (TREE_OPERAND (exp, 0)));
6583 case LABELED_BLOCK_EXPR:
6584 if (LABELED_BLOCK_BODY (exp))
6585 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6586 /* Should perhaps use expand_label, but this is simpler and safer. */
6587 do_pending_stack_adjust ();
6588 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6591 case EXIT_BLOCK_EXPR:
6592 if (EXIT_BLOCK_RETURN (exp))
6593 sorry ("returned value in block_exit_expr");
6594 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6599 expand_start_loop (1);
6600 expand_expr_stmt (TREE_OPERAND (exp, 0));
6608 tree vars = TREE_OPERAND (exp, 0);
6609 int vars_need_expansion = 0;
6611 /* Need to open a binding contour here because
6612 if there are any cleanups they must be contained here. */
6613 expand_start_bindings (2);
6615 /* Mark the corresponding BLOCK for output in its proper place. */
6616 if (TREE_OPERAND (exp, 2) != 0
6617 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6618 insert_block (TREE_OPERAND (exp, 2));
6620 /* If VARS have not yet been expanded, expand them now. */
6623 if (!DECL_RTL_SET_P (vars))
6625 vars_need_expansion = 1;
6628 expand_decl_init (vars);
6629 vars = TREE_CHAIN (vars);
6632 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6634 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6640 if (RTL_EXPR_SEQUENCE (exp))
6642 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6644 emit_insns (RTL_EXPR_SEQUENCE (exp));
6645 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6647 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6648 free_temps_for_rtl_expr (exp);
6649 return RTL_EXPR_RTL (exp);
6652 /* If we don't need the result, just ensure we evaluate any
6653 subexpressions. */
6657 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6658 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6659 EXPAND_MEMORY_USE_BAD);
6663 /* All elts simple constants => refer to a constant in memory. But
6664 if this is a non-BLKmode mode, let it store a field at a time
6665 since that should make a CONST_INT or CONST_DOUBLE when we
6666 fold. Likewise, if we have a target we can use, it is best to
6667 store directly into the target unless the type is large enough
6668 that memcpy will be used. If we are making an initializer and
6669 all operands are constant, put it in memory as well. */
6670 else if ((TREE_STATIC (exp)
6671 && ((mode == BLKmode
6672 && ! (target != 0 && safe_from_p (target, exp, 1)))
6673 || TREE_ADDRESSABLE (exp)
6674 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6675 && (! MOVE_BY_PIECES_P
6676 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6678 && ! mostly_zeros_p (exp))))
6679 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6681 rtx constructor = output_constant_def (exp, 1);
6683 if (modifier != EXPAND_CONST_ADDRESS
6684 && modifier != EXPAND_INITIALIZER
6685 && modifier != EXPAND_SUM)
6686 constructor = validize_mem (constructor);
6692 /* Handle calls that pass values in multiple non-contiguous
6693 locations. The Irix 6 ABI has examples of this. */
6694 if (target == 0 || ! safe_from_p (target, exp, 1)
6695 || GET_CODE (target) == PARALLEL)
6697 = assign_temp (build_qualified_type (type,
6699 | (TREE_READONLY (exp)
6700 * TYPE_QUAL_CONST))),
6701 TREE_ADDRESSABLE (exp), 1, 1);
6703 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6704 int_size_in_bytes (TREE_TYPE (exp)));
6710 tree exp1 = TREE_OPERAND (exp, 0);
6712 tree string = string_constant (exp1, &index);
6714 /* Try to optimize reads from const strings. */
6715 if (string != 0
6716 && TREE_CODE (string) == STRING_CST
6717 && TREE_CODE (index) == INTEGER_CST
6718 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6719 && GET_MODE_CLASS (mode) == MODE_INT
6720 && GET_MODE_SIZE (mode) == 1
6721 && modifier != EXPAND_MEMORY_USE_WO)
6722 return
6723 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6725 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6726 op0 = memory_address (mode, op0);
6728 if (cfun && current_function_check_memory_usage
6729 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6731 enum memory_use_mode memory_usage;
6732 memory_usage = get_memory_usage_from_modifier (modifier);
6734 if (memory_usage != MEMORY_USE_DONT)
6736 in_check_memory_usage = 1;
6737 emit_library_call (chkr_check_addr_libfunc,
6738 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6739 Pmode, GEN_INT (int_size_in_bytes (type)),
6740 TYPE_MODE (sizetype),
6741 GEN_INT (memory_usage),
6742 TYPE_MODE (integer_type_node));
6743 in_check_memory_usage = 0;
6747 temp = gen_rtx_MEM (mode, op0);
6748 set_mem_attributes (temp, exp, 0);
6750 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6751 here, because, in C and C++, the fact that a location is accessed
6752 through a pointer to const does not mean that the value there can
6753 never change. Languages where it can never change should
6754 also set TREE_STATIC. */
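	 /* An editorial illustration (not from the original source): given

	      const int *p = &x;

	    a read of *p goes through a pointer-to-const, yet x itself may
	    still be written elsewhere, so TREE_READONLY alone must not be
	    taken to mean the location never changes. */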
6755 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6757 /* If we are writing to this object and its type is a record with
6758 readonly fields, we must mark it as readonly so it will
6759 conflict with readonly references to those fields. */
6760 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6761 RTX_UNCHANGING_P (temp) = 1;
6767 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6771 tree array = TREE_OPERAND (exp, 0);
6772 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6773 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6774 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6777 /* Optimize the special case of a zero lower bound.
6779 We convert the low_bound to sizetype to avoid some problems
6780 with constant folding. (E.g. suppose the lower bound is 1,
6781 and its mode is QI. Without the conversion, (ARRAY
6782 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6783 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6785 if (! integer_zerop (low_bound))
6786 index = size_diffop (index, convert (sizetype, low_bound));
6788 /* Fold an expression like: "foo"[2].
6789 This is not done in fold so it won't happen inside &.
6790 Don't fold if this is for wide characters since it's too
6791 difficult to do correctly and this is a very rare case. */
6793 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6794 && TREE_CODE (array) == STRING_CST
6795 && TREE_CODE (index) == INTEGER_CST
6796 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6797 && GET_MODE_CLASS (mode) == MODE_INT
6798 && GET_MODE_SIZE (mode) == 1)
6799 return
6800 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6802 /* If this is a constant index into a constant array,
6803 just get the value from the array. Handle both the cases when
6804 we have an explicit constructor and when our operand is a variable
6805 that was declared const. */
6807 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6808 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6809 && TREE_CODE (index) == INTEGER_CST
6810 && 0 > compare_tree_int (index,
6811 list_length (CONSTRUCTOR_ELTS
6812 (TREE_OPERAND (exp, 0)))))
6816 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6817 i = TREE_INT_CST_LOW (index);
6818 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6822 return expand_expr (fold (TREE_VALUE (elem)), target,
6823 tmode, ro_modifier);
6826 else if (optimize >= 1
6827 && modifier != EXPAND_CONST_ADDRESS
6828 && modifier != EXPAND_INITIALIZER
6829 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6830 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6831 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6833 if (TREE_CODE (index) == INTEGER_CST)
6835 tree init = DECL_INITIAL (array);
6837 if (TREE_CODE (init) == CONSTRUCTOR)
6841 for (elem = CONSTRUCTOR_ELTS (init);
6843 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6844 elem = TREE_CHAIN (elem))
6847 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6848 return expand_expr (fold (TREE_VALUE (elem)), target,
6849 tmode, ro_modifier);
6851 else if (TREE_CODE (init) == STRING_CST
6852 && 0 > compare_tree_int (index,
6853 TREE_STRING_LENGTH (init)))
6855 tree type = TREE_TYPE (TREE_TYPE (init));
6856 enum machine_mode mode = TYPE_MODE (type);
6858 if (GET_MODE_CLASS (mode) == MODE_INT
6859 && GET_MODE_SIZE (mode) == 1)
6860 return GEN_INT
6861 (TREE_STRING_POINTER
6862 (init)[TREE_INT_CST_LOW (index)]));
6871 case ARRAY_RANGE_REF:
6872 /* If the operand is a CONSTRUCTOR, we can just extract the
6873 appropriate field if it is present. Don't do this if we have
6874 already written the data since we want to refer to that copy
6875 and varasm.c assumes that's what we'll do. */
6876 if (code == COMPONENT_REF
6877 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6878 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6882 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6883 elt = TREE_CHAIN (elt))
6884 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6885 /* We can normally use the value of the field in the
6886 CONSTRUCTOR. However, if this is a bitfield in
6887 an integral mode that we can fit in a HOST_WIDE_INT,
6888 we must mask only the number of bits in the bitfield,
6889 since this is done implicitly by the constructor. If
6890 the bitfield does not meet either of those conditions,
6891 we can't do this optimization. */
6892 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6893 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6895 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6896 <= HOST_BITS_PER_WIDE_INT))))
6898 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6899 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6901 HOST_WIDE_INT bitsize
6902 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6904 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6906 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6907 op0 = expand_and (op0, op1, target);
6911 enum machine_mode imode
6912 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6914 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6917 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6919 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6929 enum machine_mode mode1;
6930 HOST_WIDE_INT bitsize, bitpos;
6933 unsigned int alignment;
6934 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6935 &mode1, &unsignedp, &volatilep,
6939 /* If we got back the original object, something is wrong. Perhaps
6940 we are evaluating an expression too early. In any event, don't
6941 infinitely recurse. */
6945 /* If TEM's type is a union of variable size, pass TARGET to the inner
6946 computation, since it will need a temporary and TARGET is known
6947 to be able to serve as one. This occurs in unchecked conversion in Ada. */
6951 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6952 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6954 ? target : NULL_RTX),
6956 (modifier == EXPAND_INITIALIZER
6957 || modifier == EXPAND_CONST_ADDRESS)
6958 ? modifier : EXPAND_NORMAL);
6960 /* If this is a constant, put it into a register if it is a
6961 legitimate constant and OFFSET is 0; put it into memory if it isn't. */
6962 if (CONSTANT_P (op0))
6964 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6965 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6967 op0 = force_reg (mode, op0);
6969 op0 = validize_mem (force_const_mem (mode, op0));
6974 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6976 /* If this object is in a register, put it into memory.
6977 This case can't occur in C, but can in Ada if we have
6978 unchecked conversion of an expression from a scalar type to
6979 an array or record type. */
6980 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6981 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6983 /* If the operand is a SAVE_EXPR, we can deal with this by
6984 forcing the SAVE_EXPR into memory. */
6985 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6987 put_var_into_stack (TREE_OPERAND (exp, 0));
6988 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6993 = build_qualified_type (TREE_TYPE (tem),
6994 (TYPE_QUALS (TREE_TYPE (tem))
6995 | TYPE_QUAL_CONST));
6996 rtx memloc = assign_temp (nt, 1, 1, 1);
6998 mark_temp_addr_taken (memloc);
6999 emit_move_insn (memloc, op0);
7004 if (GET_CODE (op0) != MEM)
7007 if (GET_MODE (offset_rtx) != ptr_mode)
7009 #ifdef POINTERS_EXTEND_UNSIGNED
7010 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7012 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7016 /* A constant address in OP0 can have VOIDmode; we must not try
7017 to call force_reg in that case. */
7018 if (GET_CODE (op0) == MEM
7019 && GET_MODE (op0) == BLKmode
7020 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7022 && (bitpos % bitsize) == 0
7023 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7024 && alignment == GET_MODE_ALIGNMENT (mode1))
7026 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7028 if (GET_CODE (XEXP (temp, 0)) == REG)
7031 op0 = (replace_equiv_address
7033 force_reg (GET_MODE (XEXP (temp, 0)),
7038 op0 = change_address (op0, VOIDmode,
7039 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7040 force_reg (ptr_mode,
7044 /* Don't forget about volatility even if this is a bitfield. */
7045 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7047 if (op0 == orig_op0)
7048 op0 = copy_rtx (op0);
7050 MEM_VOLATILE_P (op0) = 1;
7053 /* Check the access. */
7054 if (cfun != 0 && current_function_check_memory_usage
7055 && GET_CODE (op0) == MEM)
7057 enum memory_use_mode memory_usage;
7058 memory_usage = get_memory_usage_from_modifier (modifier);
7060 if (memory_usage != MEMORY_USE_DONT)
7065 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7066 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7068 /* Check the access right of the pointer. */
7069 in_check_memory_usage = 1;
7070 if (size > BITS_PER_UNIT)
7071 emit_library_call (chkr_check_addr_libfunc,
7072 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7073 Pmode, GEN_INT (size / BITS_PER_UNIT),
7074 TYPE_MODE (sizetype),
7075 GEN_INT (memory_usage),
7076 TYPE_MODE (integer_type_node));
7077 in_check_memory_usage = 0;
7081 /* In cases where an aligned union has an unaligned object
7082 as a field, we might be extracting a BLKmode value from
7083 an integer-mode (e.g., SImode) object. Handle this case
7084 by doing the extract into an object as wide as the field
7085 (which we know to be the width of a basic mode), then
7086 storing into memory, and changing the mode to BLKmode. */
7087 if (mode1 == VOIDmode
7088 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7089 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7090 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7091 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7092 && modifier != EXPAND_CONST_ADDRESS
7093 && modifier != EXPAND_INITIALIZER)
7094 /* If the field isn't aligned enough to fetch as a memref,
7095 fetch it as a bit field. */
7096 || (mode1 != BLKmode
7097 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7098 && ((TYPE_ALIGN (TREE_TYPE (tem))
7099 < GET_MODE_ALIGNMENT (mode))
7100 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7101 /* If the type and the field are a constant size and the
7102 size of the type isn't the same size as the bitfield,
7103 we must use bitfield operations. */
7105 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7107 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7110 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7111 && (TYPE_ALIGN (type) > alignment
7112 || bitpos % TYPE_ALIGN (type) != 0)))
7114 enum machine_mode ext_mode = mode;
7116 if (ext_mode == BLKmode
7117 && ! (target != 0 && GET_CODE (op0) == MEM
7118 && GET_CODE (target) == MEM
7119 && bitpos % BITS_PER_UNIT == 0))
7120 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7122 if (ext_mode == BLKmode)
7124 /* In this case, BITPOS must start at a byte boundary and
7125 TARGET, if specified, must be a MEM. */
7126 if (GET_CODE (op0) != MEM
7127 || (target != 0 && GET_CODE (target) != MEM)
7128 || bitpos % BITS_PER_UNIT != 0)
7131 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7133 target = assign_temp (type, 0, 1, 1);
7135 emit_block_move (target, op0,
7136 bitsize == -1 ? expr_size (exp)
7137 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7144 op0 = validize_mem (op0);
7146 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7147 mark_reg_pointer (XEXP (op0, 0), alignment);
7149 op0 = extract_bit_field (op0, bitsize, bitpos,
7150 unsignedp, target, ext_mode, ext_mode,
7152 int_size_in_bytes (TREE_TYPE (tem)));
7154 /* If the result is a record type and BITSIZE is narrower than
7155 the mode of OP0, an integral mode, and this is a big endian
7156 machine, we must put the field into the high-order bits. */
7157 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7158 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7159 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7160 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7161 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7165 if (mode == BLKmode)
7167 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7169 rtx new = assign_temp (nt, 0, 1, 1);
7171 emit_move_insn (new, op0);
7172 op0 = copy_rtx (new);
7173 PUT_MODE (op0, BLKmode);
7179 /* If the result is BLKmode, use that to access the object
7180 now as well. */
7181 if (mode == BLKmode)
7184 /* Get a reference to just this component. */
7185 if (modifier == EXPAND_CONST_ADDRESS
7186 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7187 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7189 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7191 if (op0 == orig_op0)
7192 op0 = copy_rtx (op0);
7194 set_mem_attributes (op0, exp, 0);
7195 if (GET_CODE (XEXP (op0, 0)) == REG)
7196 mark_reg_pointer (XEXP (op0, 0), alignment);
7198 MEM_VOLATILE_P (op0) |= volatilep;
7199 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7200 || modifier == EXPAND_CONST_ADDRESS
7201 || modifier == EXPAND_INITIALIZER)
7203 else if (target == 0)
7204 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7206 convert_move (target, op0, unsignedp);
7210 /* Intended for a reference to a buffer of a file-object in Pascal.
7211 But it's not certain that a special tree code will really be
7212 necessary for these. INDIRECT_REF might work for them. */
7218 /* Pascal set IN expression. Algorithm:
7221 rlo = set_low - (set_low%bits_per_word);
7222 the_word = set [ (index - rlo)/bits_per_word ];
7223 bit_index = index % bits_per_word;
7224 bitmask = 1 << bit_index;
7225 return !!(the_word & bitmask); */
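	 /* Editorial note: the sketch above is phrased in terms of
	    bits_per_word, but the expansion below actually indexes the
	    set in BITS_PER_UNIT-sized chunks (bytes); the algorithm is
	    the same with the smaller grain. */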
7227 tree set = TREE_OPERAND (exp, 0);
7228 tree index = TREE_OPERAND (exp, 1);
7229 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7230 tree set_type = TREE_TYPE (set);
7231 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7232 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7233 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7234 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7235 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7236 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7237 rtx setaddr = XEXP (setval, 0);
7238 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7240 rtx diff, quo, rem, addr, bit, result;
7242 /* If domain is empty, answer is no. Likewise if index is constant
7243 and out of bounds. */
7244 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7245 && TREE_CODE (set_low_bound) == INTEGER_CST
7246 && tree_int_cst_lt (set_high_bound, set_low_bound))
7247 || (TREE_CODE (index) == INTEGER_CST
7248 && TREE_CODE (set_low_bound) == INTEGER_CST
7249 && tree_int_cst_lt (index, set_low_bound))
7250 || (TREE_CODE (set_high_bound) == INTEGER_CST
7251 && TREE_CODE (index) == INTEGER_CST
7252 && tree_int_cst_lt (set_high_bound, index))))
7256 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7258 /* If we get here, we have to generate the code for both cases
7259 (in range and out of range). */
7261 op0 = gen_label_rtx ();
7262 op1 = gen_label_rtx ();
7264 if (! (GET_CODE (index_val) == CONST_INT
7265 && GET_CODE (lo_r) == CONST_INT))
7267 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7268 GET_MODE (index_val), iunsignedp, 0, op1);
7271 if (! (GET_CODE (index_val) == CONST_INT
7272 && GET_CODE (hi_r) == CONST_INT))
7274 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7275 GET_MODE (index_val), iunsignedp, 0, op1);
7278 /* Calculate the element number of bit zero in the first word
7279 of the set. */
7280 if (GET_CODE (lo_r) == CONST_INT)
7281 rlow = GEN_INT (INTVAL (lo_r)
7282 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7284 rlow = expand_binop (index_mode, and_optab, lo_r,
7285 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7286 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7288 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7289 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7291 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7292 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7293 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7294 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7296 addr = memory_address (byte_mode,
7297 expand_binop (index_mode, add_optab, diff,
7298 setaddr, NULL_RTX, iunsignedp,
7301 /* Extract the bit we want to examine. */
7302 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7303 gen_rtx_MEM (byte_mode, addr),
7304 make_tree (TREE_TYPE (index), rem),
7306 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7307 GET_MODE (target) == byte_mode ? target : 0,
7308 1, OPTAB_LIB_WIDEN);
7310 if (result != target)
7311 convert_move (target, result, 1);
7313 /* Output the code to handle the out-of-range case. */
7316 emit_move_insn (target, const0_rtx);
7321 case WITH_CLEANUP_EXPR:
7322 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7324 WITH_CLEANUP_EXPR_RTL (exp)
7325 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7326 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7328 /* That's it for this cleanup. */
7329 TREE_OPERAND (exp, 1) = 0;
7331 return WITH_CLEANUP_EXPR_RTL (exp);
7333 case CLEANUP_POINT_EXPR:
7335 /* Start a new binding layer that will keep track of all cleanup
7336 actions to be performed. */
7337 expand_start_bindings (2);
7339 target_temp_slot_level = temp_slot_level;
7341 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7342 /* If we're going to use this value, load it up now. */
7344 op0 = force_not_mem (op0);
7345 preserve_temp_slots (op0);
7346 expand_end_bindings (NULL_TREE, 0, 0);
7351 /* Check for a built-in function. */
7352 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7353 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7355 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7357 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7358 == BUILT_IN_FRONTEND)
7359 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7361 return expand_builtin (exp, target, subtarget, tmode, ignore);
7364 return expand_call (exp, target, ignore);
7366 case NON_LVALUE_EXPR:
7369 case REFERENCE_EXPR:
7370 if (TREE_OPERAND (exp, 0) == error_mark_node)
7373 if (TREE_CODE (type) == UNION_TYPE)
7375 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7377 /* If both input and output are BLKmode, this conversion
7378 isn't actually doing anything unless we need to make the
7379 alignment stricter. */
7380 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7381 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7382 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7383 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7387 target = assign_temp (type, 0, 1, 1);
7389 if (GET_CODE (target) == MEM)
7390 /* Store data into beginning of memory target. */
7391 store_expr (TREE_OPERAND (exp, 0),
7392 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7394 else if (GET_CODE (target) == REG)
7395 /* Store this field into a union of the proper type. */
7396 store_field (target,
7397 MIN ((int_size_in_bytes (TREE_TYPE
7398 (TREE_OPERAND (exp, 0)))
7400 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7401 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7402 VOIDmode, 0, BITS_PER_UNIT,
7403 int_size_in_bytes (type), 0);
7407 /* Return the entire union. */
7411 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7413 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7416 /* If the signedness of the conversion differs and OP0 is
7417 a promoted SUBREG, clear that indication since we now
7418 have to do the proper extension. */
7419 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7420 && GET_CODE (op0) == SUBREG)
7421 SUBREG_PROMOTED_VAR_P (op0) = 0;
7426 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7427 if (GET_MODE (op0) == mode)
7430 /* If OP0 is a constant, just convert it into the proper mode. */
7431 if (CONSTANT_P (op0))
7433 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7434 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7436 if (modifier == EXPAND_INITIALIZER)
7437 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7441 convert_to_mode (mode, op0,
7442 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7444 convert_move (target, op0,
7445 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7449 /* We come here from MINUS_EXPR when the second operand is a
7450 constant. */
7452 this_optab = ! unsignedp && flag_trapv
7453 && (GET_MODE_CLASS(mode) == MODE_INT)
7454 ? addv_optab : add_optab;
7456 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7457 something else, make sure we add the register to the constant and
7458 then to the other thing. This case can occur during strength
7459 reduction and doing it this way will produce better code if the
7460 frame pointer or argument pointer is eliminated.
7462 fold-const.c will ensure that the constant is always in the inner
7463 PLUS_EXPR, so the only case we need to do anything about is if
7464 sp, ap, or fp is our second argument, in which case we must swap
7465 the innermost first argument and our second argument. */
7467 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7468 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7469 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7470 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7471 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7472 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7474 tree t = TREE_OPERAND (exp, 1);
7476 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7477 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7480 /* If the result is to be ptr_mode and we are adding an integer to
7481 something, we might be forming a constant. So try to use
7482 plus_constant. If it produces a sum and we can't accept it,
7483 use force_operand. This allows P = &ARR[const] to generate
7484 efficient code on machines where a SYMBOL_REF is not a valid
7485 address.
7487 If this is an EXPAND_SUM call, always return the sum. */
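      /* A hedged illustration of the case described above: for

	   static int arr[10];
	   int *p = &arr[3];

	 the operand expands to SYMBOL_REF(arr) plus a constant, so
	 plus_constant can fold the whole address into a single
	 (const (plus ...)) instead of emitting a run-time add. */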
7488 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7489 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7491 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7492 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7493 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7497 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7499 /* Use immed_double_const to ensure that the constant is
7500 truncated according to the mode of OP1, then sign extended
7501 to a HOST_WIDE_INT. Using the constant directly can result
7502 in non-canonical RTL in a 64x32 cross compile. */
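	      /* Illustration (assuming a 64-bit host and a 32-bit target):
		 the constant 0x1ffffffff must become (const_int -1) for an
		 SImode operand, not (const_int 0x1ffffffff);
		 immed_double_const performs exactly that truncation and
		 sign extension. */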
7503 constant_part
7504 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7505 (HOST_WIDE_INT) 0,
7506 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7507 op1 = plus_constant (op1, INTVAL (constant_part));
7508 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7509 op1 = force_operand (op1, target);
7513 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7514 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7515 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7519 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7521 if (! CONSTANT_P (op0))
7523 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7524 VOIDmode, modifier);
7525 /* Don't go to both_summands if modifier
7526 says it's not right to return a PLUS. */
7527 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7528 goto binop2;
7531 /* Use immed_double_const to ensure that the constant is
7532 truncated according to the mode of OP1, then sign extended
7533 to a HOST_WIDE_INT. Using the constant directly can result
7534 in non-canonical RTL in a 64x32 cross compile. */
7535 constant_part
7536 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7537 (HOST_WIDE_INT) 0,
7538 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7539 op0 = plus_constant (op0, INTVAL (constant_part));
7540 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7541 op0 = force_operand (op0, target);
7546 /* No sense saving up arithmetic to be done
7547 if it's all in the wrong mode to form part of an address.
7548 And force_operand won't know whether to sign-extend or
7549 zero-extend. */
7550 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7551 || mode != ptr_mode)
7552 goto binop;
7554 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7557 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7558 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7560 both_summands:
7561 /* Make sure any term that's a sum with a constant comes last. */
7562 if (GET_CODE (op0) == PLUS
7563 && CONSTANT_P (XEXP (op0, 1)))
7569 /* If adding to a sum including a constant,
7570 associate it to put the constant outside. */
7571 if (GET_CODE (op1) == PLUS
7572 && CONSTANT_P (XEXP (op1, 1)))
7574 rtx constant_term = const0_rtx;
7576 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7579 /* Ensure that MULT comes first if there is one. */
7580 else if (GET_CODE (op0) == MULT)
7581 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7583 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7585 /* Let's also eliminate constants from op0 if possible. */
7586 op0 = eliminate_constant_term (op0, &constant_term);
7588 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7589 their sum should be a constant. Form it into OP1, since the
7590 result we want will then be OP0 + OP1. */
7592 temp = simplify_binary_operation (PLUS, mode, constant_term,
7597 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7600 /* Put a constant term last and put a multiplication first. */
7601 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7602 temp = op1, op1 = op0, op0 = temp;
7604 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7605 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7608 /* For initializers, we are allowed to return a MINUS of two
7609 symbolic constants. Here we handle all cases when both operands
7610 are constant. */
7611 /* Handle difference of two symbolic constants,
7612 for the sake of an initializer. */
7613 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7614 && really_constant_p (TREE_OPERAND (exp, 0))
7615 && really_constant_p (TREE_OPERAND (exp, 1)))
7617 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7618 VOIDmode, ro_modifier);
7619 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7620 VOIDmode, ro_modifier);
7622 /* If the last operand is a CONST_INT, use plus_constant of
7623 the negated constant. Else make the MINUS. */
7624 if (GET_CODE (op1) == CONST_INT)
7625 return plus_constant (op0, - INTVAL (op1));
7627 return gen_rtx_MINUS (mode, op0, op1);
7629 /* Convert A - const to A + (-const). */
7630 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7632 tree negated = fold (build1 (NEGATE_EXPR, type,
7633 TREE_OPERAND (exp, 1)));
7635 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7636 /* If we can't negate the constant in TYPE, leave it alone and
7637 expand_binop will negate it for us. We used to try to do it
7638 here in the signed version of TYPE, but that doesn't work
7639 on POINTER_TYPEs. */;
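	  /* E.g. (an editorial sketch): for signed int A, `A - 5' is
	     rewritten as `A + (-5)' so the PLUS_EXPR path and its
	     address-forming optimizations apply; for unsigned and
	     pointer types the negation is left for expand_binop. */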
7642 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7646 this_optab = ! unsignedp && flag_trapv
7647 && (GET_MODE_CLASS(mode) == MODE_INT)
7648 ? subv_optab : sub_optab;
7652 /* If first operand is constant, swap them.
7653 Thus the following special case checks need only
7654 check the second operand. */
7655 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7657 register tree t1 = TREE_OPERAND (exp, 0);
7658 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7659 TREE_OPERAND (exp, 1) = t1;
7662 /* Attempt to return something suitable for generating an
7663 indexed address, for machines that support that. */
7665 if (modifier == EXPAND_SUM && mode == ptr_mode
7666 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7667 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7669 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7672 /* Apply distributive law if OP0 is x+c. */
7673 if (GET_CODE (op0) == PLUS
7674 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7679 (mode, XEXP (op0, 0),
7680 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7681 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7682 * INTVAL (XEXP (op0, 1))));
7684 if (GET_CODE (op0) != REG)
7685 op0 = force_operand (op0, NULL_RTX);
7686 if (GET_CODE (op0) != REG)
7687 op0 = copy_to_mode_reg (mode, op0);
7690 gen_rtx_MULT (mode, op0,
7691 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7694 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7697 /* Check for multiplying things that have been extended
7698 from a narrower type. If this machine supports multiplying
7699 in that narrower type with a result in the desired type,
7700 do it that way, and avoid the explicit type-conversion. */
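      /* A sketch of the shape being matched, assuming the target provides
	 a widening pattern such as mulhisi3:

	   short a, b;
	   int c = (int) a * (int) b;

	 can then be emitted as one HImode x HImode -> SImode multiply
	 instead of two extensions followed by a full SImode multiply. */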
7701 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7702 && TREE_CODE (type) == INTEGER_TYPE
7703 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7704 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7705 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7706 && int_fits_type_p (TREE_OPERAND (exp, 1),
7707 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7708 /* Don't use a widening multiply if a shift will do. */
7709 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7710 > HOST_BITS_PER_WIDE_INT)
7711 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7713 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7714 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7716 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7717 /* If both operands are extended, they must either both
7718 be zero-extended or both be sign-extended. */
7719 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7721 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7723 enum machine_mode innermode
7724 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7725 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7726 ? smul_widen_optab : umul_widen_optab);
7727 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7728 ? umul_widen_optab : smul_widen_optab);
7729 if (mode == GET_MODE_WIDER_MODE (innermode))
7731 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7733 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7734 NULL_RTX, VOIDmode, 0);
7735 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7736 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7739 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7740 NULL_RTX, VOIDmode, 0);
7743 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7744 && innermode == word_mode)
7747 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7748 NULL_RTX, VOIDmode, 0);
7749 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7750 op1 = convert_modes (innermode, mode,
7751 expand_expr (TREE_OPERAND (exp, 1),
7752 NULL_RTX, VOIDmode, 0),
7755 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7756 NULL_RTX, VOIDmode, 0);
7757 temp = expand_binop (mode, other_optab, op0, op1, target,
7758 unsignedp, OPTAB_LIB_WIDEN);
7759 htem = expand_mult_highpart_adjust (innermode,
7760 gen_highpart (innermode, temp),
7762 gen_highpart (innermode, temp),
7764 emit_move_insn (gen_highpart (innermode, temp), htem);
7769 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7770 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7771 return expand_mult (mode, op0, op1, target, unsignedp);
7773 case TRUNC_DIV_EXPR:
7774 case FLOOR_DIV_EXPR:
7776 case ROUND_DIV_EXPR:
7777 case EXACT_DIV_EXPR:
7778 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7780 /* Possible optimization: compute the dividend with EXPAND_SUM;
7781 then, if the divisor is constant, we can optimize the case
7782 where some terms of the dividend have coefficients divisible by it. */
7783 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7784 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7785 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7788 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
7789 saving an expensive divide. If not, combine will rebuild the original
7790 computation. */
7791 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7792 && !real_onep (TREE_OPERAND (exp, 0)))
7793 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7794 build (RDIV_EXPR, type,
7795 build_real (type, dconst1),
7796 TREE_OPERAND (exp, 1))),
7797 target, tmode, unsignedp);
7798 this_optab = sdiv_optab;
7801 case TRUNC_MOD_EXPR:
7802 case FLOOR_MOD_EXPR:
7804 case ROUND_MOD_EXPR:
7805 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7807 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7808 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7809 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7811 case FIX_ROUND_EXPR:
7812 case FIX_FLOOR_EXPR:
7814 abort (); /* Not used for C. */
7816 case FIX_TRUNC_EXPR:
7817 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7819 target = gen_reg_rtx (mode);
7820 expand_fix (target, op0, unsignedp);
7824 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7826 target = gen_reg_rtx (mode);
7827 /* expand_float can't figure out what to do if FROM has VOIDmode.
7828 So give it the correct mode. With -O, cse will optimize this. */
7829 if (GET_MODE (op0) == VOIDmode)
7830 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7832 expand_float (target, op0,
7833 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7837 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7838 temp = expand_unop (mode,
7839 ! unsignedp && flag_trapv
7840 && (GET_MODE_CLASS(mode) == MODE_INT)
7841 ? negv_optab : neg_optab, op0, target, 0);
7847 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7849 /* Handle complex values specially. */
7850 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7851 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7852 return expand_complex_abs (mode, op0, target, unsignedp);
7854 /* Unsigned abs is simply the operand. Testing here means we don't
7855 risk generating incorrect code below. */
7856 if (TREE_UNSIGNED (type))
7859 return expand_abs (mode, op0, target, unsignedp,
7860 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7864 target = original_target;
7865 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7866 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7867 || GET_MODE (target) != mode
7868 || (GET_CODE (target) == REG
7869 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7870 target = gen_reg_rtx (mode);
7871 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7872 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7874 /* First try to do it with a special MIN or MAX instruction.
7875 If that does not win, use a conditional jump to select the proper
7876 value. */
7877 this_optab = (TREE_UNSIGNED (type)
7878 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7879 : (code == MIN_EXPR ? smin_optab : smax_optab));
7881 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7886 /* At this point, a MEM target is no longer useful; we will get better
7887 code in a pseudo register, so switch to one. */
7889 if (GET_CODE (target) == MEM)
7890 target = gen_reg_rtx (mode);
7893 emit_move_insn (target, op0);
7895 op0 = gen_label_rtx ();
7897 /* If this mode is an integer too wide to compare properly,
7898 compare word by word. Rely on cse to optimize constant cases. */
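      /* E.g. (editorial note) a DImode MIN/MAX on a 32-bit target has no
	 single-insn compare, so do_jump_by_parts_greater_rtx below builds
	 the branch from word-sized compares. */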
7899 if (GET_MODE_CLASS (mode) == MODE_INT
7900 && ! can_compare_p (GE, mode, ccp_jump))
7902 if (code == MAX_EXPR)
7903 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7904 target, op1, NULL_RTX, op0);
7906 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7907 op1, target, NULL_RTX, op0);
7911 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7912 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7913 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7916 emit_move_insn (target, op1);
7921 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7922 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7928 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7929 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7934 /* ??? Can optimize bitwise operations with one arg constant.
7935 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7936 and (a bitwise1 b) bitwise2 b (etc)
7937 but that is probably not worthwhile. */
7939 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7940 boolean values when we want in all cases to compute both of them. In
7941 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7942 as actual zero-or-1 values and then bitwise anding. In cases where
7943 there cannot be any side effects, better code would be made by
7944 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7945 how to recognize those cases. */
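      /* For example, `a && b' (TRUTH_ANDIF_EXPR) must not evaluate b when
	 a is false, whereas TRUTH_AND_EXPR may compute both operands as
	 0-or-1 values unconditionally and AND them, avoiding a branch. */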
7947 case TRUTH_AND_EXPR:
7949 this_optab = and_optab;
7954 this_optab = ior_optab;
7957 case TRUTH_XOR_EXPR:
7959 this_optab = xor_optab;
7966 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7968 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7969 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7972 /* Could determine the answer when only additive constants differ. Also,
7973 the addition of one can be handled by changing the condition. */
7980 case UNORDERED_EXPR:
7987 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7991 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7992 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7994 && GET_CODE (original_target) == REG
7995 && (GET_MODE (original_target)
7996 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7998 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8001 if (temp != original_target)
8002 temp = copy_to_reg (temp);
8004 op1 = gen_label_rtx ();
8005 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8006 GET_MODE (temp), unsignedp, 0, op1);
8007 emit_move_insn (temp, const1_rtx);
8012 /* If no set-flag instruction, must generate a conditional
8013 store into a temporary variable. Drop through
8014 and handle this like && and ||. */
8016 case TRUTH_ANDIF_EXPR:
8017 case TRUTH_ORIF_EXPR:
8019 && (target == 0 || ! safe_from_p (target, exp, 1)
8020 /* Make sure we don't have a hard reg (such as the function's return
8021 value) live across basic blocks, if not optimizing. */
8022 || (!optimize && GET_CODE (target) == REG
8023 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8024 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8027 emit_clr_insn (target);
8029 op1 = gen_label_rtx ();
8030 jumpifnot (exp, op1);
8033 emit_0_to_1_insn (target);
8036 return ignore ? const0_rtx : target;
8038 case TRUTH_NOT_EXPR:
8039 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8040 /* The parser is careful to generate TRUTH_NOT_EXPR
8041 only with operands that are always zero or one. */
8042 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8043 target, 1, OPTAB_LIB_WIDEN);
8049 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8051 return expand_expr (TREE_OPERAND (exp, 1),
8052 (ignore ? const0_rtx : target),
8056 /* If we would have a "singleton" (see below) were it not for a
8057 conversion in each arm, bring that conversion back out. */
8058 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8059 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8060 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8061 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8063 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8064 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8066 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8067 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8068 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8069 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8070 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8071 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8072 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8073 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8074 return expand_expr (build1 (NOP_EXPR, type,
8075 build (COND_EXPR, TREE_TYPE (iftrue),
8076 TREE_OPERAND (exp, 0),
8078 target, tmode, modifier);
8082 /* Note that COND_EXPRs whose type is a structure or union
8083 are required to be constructed to contain assignments of
8084 a temporary variable, so that we can evaluate them here
8085 for side effect only. If type is void, we must do likewise. */
8087 /* If an arm of the branch requires a cleanup,
8088 only that cleanup is performed. */
8091 tree binary_op = 0, unary_op = 0;
8093 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8094 convert it to our mode, if necessary. */
8095 if (integer_onep (TREE_OPERAND (exp, 1))
8096 && integer_zerop (TREE_OPERAND (exp, 2))
8097 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8101 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8106 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8107 if (GET_MODE (op0) == mode)
8111 target = gen_reg_rtx (mode);
8112 convert_move (target, op0, unsignedp);
8116 /* Check for X ? A + B : A. If we have this, we can copy A to the
8117 output and conditionally add B. Similarly for unary operations.
8118 Don't do this if X has side-effects because those side effects
8119 might affect A or B and the "?" operation is a sequence point in
8120 ANSI. (operand_equal_p tests for side effects.) */
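/* E.g. in "x ? y + z : y" the singleton is Y and the binary_op is
   Y + Z: we can store Y into the target and conditionally add Z.  */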
8122 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8123 && operand_equal_p (TREE_OPERAND (exp, 2),
8124 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8125 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8126 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8127 && operand_equal_p (TREE_OPERAND (exp, 1),
8128 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8129 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8130 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8131 && operand_equal_p (TREE_OPERAND (exp, 2),
8132 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8133 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8134 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8135 && operand_equal_p (TREE_OPERAND (exp, 1),
8136 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8137 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8139 /* If we are not to produce a result, we have no target. Otherwise,
8140 if a target was specified use it; it will not be used as an
8141 intermediate target unless it is safe. If no target, use a temporary. */
8146 else if (original_target
8147 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8148 || (singleton && GET_CODE (original_target) == REG
8149 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8150 && original_target == var_rtx (singleton)))
8151 && GET_MODE (original_target) == mode
8152 #ifdef HAVE_conditional_move
8153 && (! can_conditionally_move_p (mode)
8154 || GET_CODE (original_target) == REG
8155 || TREE_ADDRESSABLE (type))
8157 && (GET_CODE (original_target) != MEM
8158 || TREE_ADDRESSABLE (type)))
8159 temp = original_target;
8160 else if (TREE_ADDRESSABLE (type))
8163 temp = assign_temp (type, 0, 0, 1);
8165 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8166 do the test of X as a store-flag operation, do this as
8167 A + ((X != 0) << log C). Similarly for other simple binary
8168 operators. Only do for C == 1 if BRANCH_COST is low. */
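/* E.g. "x ? y + 4 : y" can be done as y + ((x != 0) << 2), since
   4 == 1 << 2.  When BRANCH_COST is low, only the C == 1 form,
   y + (x != 0), is handled this way.  */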
8169 if (temp && singleton && binary_op
8170 && (TREE_CODE (binary_op) == PLUS_EXPR
8171 || TREE_CODE (binary_op) == MINUS_EXPR
8172 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8173 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8174 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8175 : integer_onep (TREE_OPERAND (binary_op, 1)))
8176 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8179 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8180 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8181 ? addv_optab : add_optab)
8182 : TREE_CODE (binary_op) == MINUS_EXPR
8183 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8184 ? subv_optab : sub_optab)
8185 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8188 /* If we had X ? A : A + 1, do this as A + (X == 0).
8190 We have to invert the truth value here and then put it
8191 back later if do_store_flag fails. We cannot simply copy
8192 TREE_OPERAND (exp, 0) to another variable and modify that
8193 because invert_truthvalue can modify the tree pointed to by its argument. */
8195 if (singleton == TREE_OPERAND (exp, 1))
8196 TREE_OPERAND (exp, 0)
8197 = invert_truthvalue (TREE_OPERAND (exp, 0));
8199 result = do_store_flag (TREE_OPERAND (exp, 0),
8200 (safe_from_p (temp, singleton, 1)
8202 mode, BRANCH_COST <= 1);
8204 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8205 result = expand_shift (LSHIFT_EXPR, mode, result,
8206 build_int_2 (tree_log2
8210 (safe_from_p (temp, singleton, 1)
8211 ? temp : NULL_RTX), 0);
8215 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8216 return expand_binop (mode, boptab, op1, result, temp,
8217 unsignedp, OPTAB_LIB_WIDEN);
8219 else if (singleton == TREE_OPERAND (exp, 1))
8220 TREE_OPERAND (exp, 0)
8221 = invert_truthvalue (TREE_OPERAND (exp, 0));
8224 do_pending_stack_adjust ();
8226 op0 = gen_label_rtx ();
8228 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8232 /* If the target conflicts with the other operand of the
8233 binary op, we can't use it. Also, we can't use the target
8234 if it is a hard register, because evaluating the condition
8235 might clobber it. */
8237 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8238 || (GET_CODE (temp) == REG
8239 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8240 temp = gen_reg_rtx (mode);
8241 store_expr (singleton, temp, 0);
8244 expand_expr (singleton,
8245 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8246 if (singleton == TREE_OPERAND (exp, 1))
8247 jumpif (TREE_OPERAND (exp, 0), op0);
8249 jumpifnot (TREE_OPERAND (exp, 0), op0);
8251 start_cleanup_deferral ();
8252 if (binary_op && temp == 0)
8253 /* Just touch the other operand. */
8254 expand_expr (TREE_OPERAND (binary_op, 1),
8255 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8257 store_expr (build (TREE_CODE (binary_op), type,
8258 make_tree (type, temp),
8259 TREE_OPERAND (binary_op, 1)),
8262 store_expr (build1 (TREE_CODE (unary_op), type,
8263 make_tree (type, temp)),
8267 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8268 comparison operator. If we have one of these cases, set the
8269 output to A, branch on A (cse will merge these two references),
8270 then set the output to FOO. */
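/* E.g. for "x != 0 ? x : y" we store X into the target, jump if
   X != 0, and store Y only when that jump is not taken.  */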
8272 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8273 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8274 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8275 TREE_OPERAND (exp, 1), 0)
8276 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8277 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8278 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8280 if (GET_CODE (temp) == REG
8281 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8282 temp = gen_reg_rtx (mode);
8283 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8284 jumpif (TREE_OPERAND (exp, 0), op0);
8286 start_cleanup_deferral ();
8287 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8291 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8292 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8293 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8294 TREE_OPERAND (exp, 2), 0)
8295 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8296 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8297 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8299 if (GET_CODE (temp) == REG
8300 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8301 temp = gen_reg_rtx (mode);
8302 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8303 jumpifnot (TREE_OPERAND (exp, 0), op0);
8305 start_cleanup_deferral ();
8306 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8311 op1 = gen_label_rtx ();
8312 jumpifnot (TREE_OPERAND (exp, 0), op0);
8314 start_cleanup_deferral ();
8316 /* One branch of the cond can be void, if it never returns. For
8317 example, A ? throw : E. */
8319 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8320 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8322 expand_expr (TREE_OPERAND (exp, 1),
8323 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8324 end_cleanup_deferral ();
8326 emit_jump_insn (gen_jump (op1));
8329 start_cleanup_deferral ();
8331 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8332 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8334 expand_expr (TREE_OPERAND (exp, 2),
8335 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8338 end_cleanup_deferral ();
8349 /* Something needs to be initialized, but we didn't know
8350 where that thing was when building the tree. For example,
8351 it could be the return value of a function, or a parameter
8352 to a function which is laid out on the stack, or a temporary
8353 variable which must be passed by reference.
8355 We guarantee that the expression will either be constructed
8356 or copied into our original target. */
8358 tree slot = TREE_OPERAND (exp, 0);
8359 tree cleanups = NULL_TREE;
8362 if (TREE_CODE (slot) != VAR_DECL)
8366 target = original_target;
8368 /* Set this here so that if we get a target that refers to a
8369 register variable that's already been used, put_reg_into_stack
8370 knows that it should fix up those uses. */
8371 TREE_USED (slot) = 1;
8375 if (DECL_RTL_SET_P (slot))
8377 target = DECL_RTL (slot);
8378 /* If we have already expanded the slot, don't do it again. */
8380 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8385 target = assign_temp (type, 2, 0, 1);
8386 /* All temp slots at this level must not conflict. */
8387 preserve_temp_slots (target);
8388 SET_DECL_RTL (slot, target);
8389 if (TREE_ADDRESSABLE (slot))
8390 put_var_into_stack (slot);
8392 /* Since SLOT is not known to the called function
8393 to belong to its stack frame, we must build an explicit
8394 cleanup. This case occurs when we must build up a reference
8395 to pass the reference as an argument. In this case,
8396 it is very likely that such a reference need not be built here. */
8399 if (TREE_OPERAND (exp, 2) == 0)
8400 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8401 cleanups = TREE_OPERAND (exp, 2);
8406 /* This case does occur, when expanding a parameter which
8407 needs to be constructed on the stack. The target
8408 is the actual stack address that we want to initialize.
8409 The function we call will perform the cleanup in this case. */
8411 /* If we have already assigned it space, use that space,
8412 not the target that we were passed in, as our target
8413 parameter is only a hint. */
8414 if (DECL_RTL_SET_P (slot))
8416 target = DECL_RTL (slot);
8417 /* If we have already expanded the slot, don't do it again. */
8419 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8424 SET_DECL_RTL (slot, target);
8425 /* If we must have an addressable slot, then make sure that
8426 the RTL that we just stored in slot is OK. */
8427 if (TREE_ADDRESSABLE (slot))
8428 put_var_into_stack (slot);
8432 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8433 /* Mark it as expanded. */
8434 TREE_OPERAND (exp, 1) = NULL_TREE;
8436 store_expr (exp1, target, 0);
8438 expand_decl_cleanup (NULL_TREE, cleanups);
8445 tree lhs = TREE_OPERAND (exp, 0);
8446 tree rhs = TREE_OPERAND (exp, 1);
8448 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8454 /* If lhs is complex, expand calls in rhs before computing it.
8455 That's so we don't compute a pointer and save it over a
8456 call. If lhs is simple, compute it first so we can give it
8457 as a target if the rhs is just a call. This avoids an
8458 extra temp and copy and that prevents a partial-subsumption
8459 which makes bad code. Actually we could treat
8460 component_ref's of vars like vars. */
8462 tree lhs = TREE_OPERAND (exp, 0);
8463 tree rhs = TREE_OPERAND (exp, 1);
8467 /* Check for |= or &= of a bitfield of size one into another bitfield
8468 of size 1. In this case, (unless we need the result of the
8469 assignment) we can do this more efficiently with a
8470 test followed by an assignment, if necessary.
8472 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8473 things change so we do, this code should be enhanced to handle it. */
8476 && TREE_CODE (lhs) == COMPONENT_REF
8477 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8478 || TREE_CODE (rhs) == BIT_AND_EXPR)
8479 && TREE_OPERAND (rhs, 0) == lhs
8480 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8481 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8482 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8484 rtx label = gen_label_rtx ();
8486 do_jump (TREE_OPERAND (rhs, 1),
8487 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8488 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8489 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8490 (TREE_CODE (rhs) == BIT_IOR_EXPR
8492 : integer_zero_node)),
8494 do_pending_stack_adjust ();
8499 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8505 if (!TREE_OPERAND (exp, 0))
8506 expand_null_return ();
8508 expand_return (TREE_OPERAND (exp, 0));
8511 case PREINCREMENT_EXPR:
8512 case PREDECREMENT_EXPR:
8513 return expand_increment (exp, 0, ignore);
8515 case POSTINCREMENT_EXPR:
8516 case POSTDECREMENT_EXPR:
8517 /* Faster to treat as pre-increment if result is not used. */
8518 return expand_increment (exp, ! ignore, ignore);
8521 /* If nonzero, TEMP will be set to the address of something that might
8522 be a MEM corresponding to a stack slot. */
8525 /* Are we taking the address of a nested function? */
8526 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8527 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8528 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8529 && ! TREE_STATIC (exp))
8531 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8532 op0 = force_operand (op0, target);
8534 /* If we are taking the address of something erroneous, just use zero. */
8536 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8540 /* We make sure to pass const0_rtx down if we came in with
8541 ignore set, to avoid doing the cleanups twice for something. */
8542 op0 = expand_expr (TREE_OPERAND (exp, 0),
8543 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8544 (modifier == EXPAND_INITIALIZER
8545 ? modifier : EXPAND_CONST_ADDRESS));
8547 /* If we are going to ignore the result, OP0 will have been set
8548 to const0_rtx, so just return it. Don't get confused and
8549 think we are taking the address of the constant. */
8553 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8554 clever and return a REG when given a MEM. */
8555 op0 = protect_from_queue (op0, 1);
8557 /* We would like the object in memory. If it is a constant, we can
8558 have it be statically allocated into memory. For a non-constant,
8559 we need to allocate some memory and store the value into it. */
8561 if (CONSTANT_P (op0))
8562 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8564 else if (GET_CODE (op0) == MEM)
8566 mark_temp_addr_taken (op0);
8567 temp = XEXP (op0, 0);
8570 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8571 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8572 || GET_CODE (op0) == PARALLEL)
8574 /* If this object is in a register, it must not be BLKmode. Copy it into a memory temporary so we can take its address. */
8576 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8577 tree nt = build_qualified_type (inner_type,
8578 (TYPE_QUALS (inner_type)
8579 | TYPE_QUAL_CONST));
8580 rtx memloc = assign_temp (nt, 1, 1, 1);
8582 mark_temp_addr_taken (memloc);
8583 if (GET_CODE (op0) == PARALLEL)
8584 /* Handle calls that pass values in multiple non-contiguous
8585 locations. The Irix 6 ABI has examples of this. */
8586 emit_group_store (memloc, op0,
8587 int_size_in_bytes (inner_type),
8588 TYPE_ALIGN (inner_type));
8590 emit_move_insn (memloc, op0);
8594 if (GET_CODE (op0) != MEM)
8597 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8599 temp = XEXP (op0, 0);
8600 #ifdef POINTERS_EXTEND_UNSIGNED
8601 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8602 && mode == ptr_mode)
8603 temp = convert_memory_address (ptr_mode, temp);
8608 op0 = force_operand (XEXP (op0, 0), target);
8611 if (flag_force_addr && GET_CODE (op0) != REG)
8612 op0 = force_reg (Pmode, op0);
8614 if (GET_CODE (op0) == REG
8615 && ! REG_USERVAR_P (op0))
8616 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8618 /* If we might have had a temp slot, add an equivalent address to be invalidated. */
8621 update_temp_slot_address (temp, op0);
8623 #ifdef POINTERS_EXTEND_UNSIGNED
8624 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8625 && mode == ptr_mode)
8626 op0 = convert_memory_address (ptr_mode, op0);
8631 case ENTRY_VALUE_EXPR:
8634 /* COMPLEX type for Extended Pascal & Fortran */
8637 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8640 /* Get the rtx code of the operands. */
8641 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8642 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8645 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8649 /* Move the real (op0) and imaginary (op1) parts to their location. */
8650 emit_move_insn (gen_realpart (mode, target), op0);
8651 emit_move_insn (gen_imagpart (mode, target), op1);
8653 insns = get_insns ();
8656 /* Complex construction should appear as a single unit. */
8657 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8658 each with a separate pseudo as destination.
8659 It's not correct for flow to treat them as a unit. */
8660 if (GET_CODE (target) != CONCAT)
8661 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8669 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8670 return gen_realpart (mode, op0);
8673 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8674 return gen_imagpart (mode, op0);
8678 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8682 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8685 target = gen_reg_rtx (mode);
8689 /* Store the realpart and the negated imagpart to target. */
8690 emit_move_insn (gen_realpart (partmode, target),
8691 gen_realpart (partmode, op0));
8693 imag_t = gen_imagpart (partmode, target);
8694 temp = expand_unop (partmode,
8695 ! unsignedp && flag_trapv
8696 && (GET_MODE_CLASS(partmode) == MODE_INT)
8697 ? negv_optab : neg_optab,
8698 gen_imagpart (partmode, op0), imag_t, 0);
8700 emit_move_insn (imag_t, temp);
8702 insns = get_insns ();
8705 /* Conjugate should appear as a single unit.
8706 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8707 each with a separate pseudo as destination.
8708 It's not correct for flow to treat them as a unit. */
8709 if (GET_CODE (target) != CONCAT)
8710 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8717 case TRY_CATCH_EXPR:
8719 tree handler = TREE_OPERAND (exp, 1);
8721 expand_eh_region_start ();
8723 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8725 expand_eh_region_end_cleanup (handler);
8730 case TRY_FINALLY_EXPR:
8732 tree try_block = TREE_OPERAND (exp, 0);
8733 tree finally_block = TREE_OPERAND (exp, 1);
8734 rtx finally_label = gen_label_rtx ();
8735 rtx done_label = gen_label_rtx ();
8736 rtx return_link = gen_reg_rtx (Pmode);
8737 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8738 (tree) finally_label, (tree) return_link);
8739 TREE_SIDE_EFFECTS (cleanup) = 1;
8741 /* Start a new binding layer that will keep track of all cleanup
8742 actions to be performed. */
8743 expand_start_bindings (2);
8745 target_temp_slot_level = temp_slot_level;
8747 expand_decl_cleanup (NULL_TREE, cleanup);
8748 op0 = expand_expr (try_block, target, tmode, modifier);
8750 preserve_temp_slots (op0);
8751 expand_end_bindings (NULL_TREE, 0, 0);
8752 emit_jump (done_label);
8753 emit_label (finally_label);
8754 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8755 emit_indirect_jump (return_link);
8756 emit_label (done_label);
8760 case GOTO_SUBROUTINE_EXPR:
8762 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8763 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8764 rtx return_address = gen_label_rtx ();
8765 emit_move_insn (return_link,
8766 gen_rtx_LABEL_REF (Pmode, return_address));
8768 emit_label (return_address);
8773 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8776 return get_exception_pointer (cfun);
8779 /* Function descriptors are not valid except as
8780 initialization constants, and should not be expanded. */
8784 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8787 /* Here to do an ordinary binary operator, generating an instruction
8788 from the optab already placed in `this_optab'. */
8790 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8792 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8793 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8795 temp = expand_binop (mode, this_optab, op0, op1, target,
8796 unsignedp, OPTAB_LIB_WIDEN);
8802 /* Similar to expand_expr, except that we don't specify a target, target
8803 mode, or modifier and we return the alignment of the inner type. This is
8804 used in cases where it is not necessary to align the result to the
8805 alignment of its type as long as we know the alignment of the result, for
8806 example for comparisons of BLKmode values. */
8809 expand_expr_unaligned (exp, palign)
8811 unsigned int *palign;
8814 tree type = TREE_TYPE (exp);
8815 register enum machine_mode mode = TYPE_MODE (type);
8817 /* Default the alignment we return to that of the type. */
8818 *palign = TYPE_ALIGN (type);
8820 /* The only case in which we do anything special is if the resulting mode is BLKmode. */
8822 if (mode != BLKmode)
8823 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8825 switch (TREE_CODE (exp))
8829 case NON_LVALUE_EXPR:
8830 /* Conversions between BLKmode values don't change the underlying
8831 alignment or value. */
8832 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8833 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8837 /* Much of the code for this case is copied directly from expand_expr.
8838 We need to duplicate it here because we will do something different
8839 in the fall-through case, so we need to handle the same exceptions it does. */
8842 tree array = TREE_OPERAND (exp, 0);
8843 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8844 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8845 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8848 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8851 /* Optimize the special case of a zero lower bound.
8853 We convert the low_bound to sizetype to avoid some problems
8854 with constant folding. (E.g. suppose the lower bound is 1,
8855 and its mode is QI. Without the conversion, (ARRAY
8856 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8857 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8859 if (! integer_zerop (low_bound))
8860 index = size_diffop (index, convert (sizetype, low_bound));
8862 /* If this is a constant index into a constant array,
8863 just get the value from the array. Handle both the cases when
8864 we have an explicit constructor and when our operand is a variable
8865 that was declared const. */
8867 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8868 && host_integerp (index, 0)
8869 && 0 > compare_tree_int (index,
8870 list_length (CONSTRUCTOR_ELTS
8871 (TREE_OPERAND (exp, 0)))))
8875 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8876 i = tree_low_cst (index, 0);
8877 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8881 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8884 else if (optimize >= 1
8885 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8886 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8887 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8889 if (TREE_CODE (index) == INTEGER_CST)
8891 tree init = DECL_INITIAL (array);
8893 if (TREE_CODE (init) == CONSTRUCTOR)
8897 for (elem = CONSTRUCTOR_ELTS (init);
8898 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8899 elem = TREE_CHAIN (elem))
8903 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8913 case ARRAY_RANGE_REF:
8914 /* If the operand is a CONSTRUCTOR, we can just extract the
8915 appropriate field if it is present. Don't do this if we have
8916 already written the data since we want to refer to that copy
8917 and varasm.c assumes that's what we'll do. */
8918 if (TREE_CODE (exp) == COMPONENT_REF
8919 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8920 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8924 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8925 elt = TREE_CHAIN (elt))
8926 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8927 /* Note that unlike the case in expand_expr, we know this is
8928 BLKmode and hence not an integer. */
8929 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8933 enum machine_mode mode1;
8934 HOST_WIDE_INT bitsize, bitpos;
8937 unsigned int alignment;
8939 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8940 &mode1, &unsignedp, &volatilep,
8943 /* If we got back the original object, something is wrong. Perhaps
8944 we are evaluating an expression too early. In any event, don't
8945 infinitely recurse. */
8949 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8951 /* If this is a constant, put it into a register if it is a
8952 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8953 if (CONSTANT_P (op0))
8955 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8957 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8959 op0 = force_reg (inner_mode, op0);
8961 op0 = validize_mem (force_const_mem (inner_mode, op0));
8966 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8968 /* If this object is in a register, put it into memory.
8969 This case can't occur in C, but can in Ada if we have
8970 unchecked conversion of an expression from a scalar type to
8971 an array or record type. */
8972 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8973 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8975 tree nt = build_qualified_type (TREE_TYPE (tem),
8976 (TYPE_QUALS (TREE_TYPE (tem))
8977 | TYPE_QUAL_CONST));
8978 rtx memloc = assign_temp (nt, 1, 1, 1);
8980 mark_temp_addr_taken (memloc);
8981 emit_move_insn (memloc, op0);
8985 if (GET_CODE (op0) != MEM)
8988 if (GET_MODE (offset_rtx) != ptr_mode)
8990 #ifdef POINTERS_EXTEND_UNSIGNED
8991 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8993 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8997 op0 = change_address (op0, VOIDmode,
8998 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8999 force_reg (ptr_mode,
9003 /* Don't forget about volatility even if this is a bitfield. */
9004 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9006 op0 = copy_rtx (op0);
9007 MEM_VOLATILE_P (op0) = 1;
9010 /* Check the access. */
9011 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9016 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9017 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9019 /* Check the access right of the pointer. */
9020 in_check_memory_usage = 1;
9021 if (size > BITS_PER_UNIT)
9022 emit_library_call (chkr_check_addr_libfunc,
9023 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9024 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9025 TYPE_MODE (sizetype),
9026 GEN_INT (MEMORY_USE_RO),
9027 TYPE_MODE (integer_type_node));
9028 in_check_memory_usage = 0;
9031 /* In cases where an aligned union has an unaligned object
9032 as a field, we might be extracting a BLKmode value from
9033 an integer-mode (e.g., SImode) object. Handle this case
9034 by doing the extract into an object as wide as the field
9035 (which we know to be the width of a basic mode), then
9036 storing into memory, and changing the mode to BLKmode.
9037 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9038 EXPAND_INITIALIZER), then we must not copy to a temporary. */
9039 if (mode1 == VOIDmode
9040 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9041 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9042 && (TYPE_ALIGN (type) > alignment
9043 || bitpos % TYPE_ALIGN (type) != 0)))
9045 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9047 if (ext_mode == BLKmode)
9049 /* In this case, BITPOS must start at a byte boundary. */
9050 if (GET_CODE (op0) != MEM
9051 || bitpos % BITS_PER_UNIT != 0)
9054 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9058 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9060 rtx new = assign_temp (nt, 0, 1, 1);
9062 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9063 unsignedp, NULL_RTX, ext_mode,
9064 ext_mode, alignment,
9065 int_size_in_bytes (TREE_TYPE (tem)));
9067 /* If the result is a record type and BITSIZE is narrower than
9068 the mode of OP0, an integral mode, and this is a big endian
9069 machine, we must put the field into the high-order bits. */
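/* E.g. an 8-bit field extracted from a 32-bit SImode object on a
   big-endian machine must be shifted left by 32 - 8 == 24 bits.  */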
9070 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9071 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9072 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9073 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9074 size_int (GET_MODE_BITSIZE
9079 emit_move_insn (new, op0);
9080 op0 = copy_rtx (new);
9081 PUT_MODE (op0, BLKmode);
9085 /* Get a reference to just this component. */
9086 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9088 set_mem_alias_set (op0, get_alias_set (exp));
9090 /* Adjust the alignment in case the bit position is not
9091 a multiple of the alignment of the inner object. */
9092 while (bitpos % alignment != 0)
9095 if (GET_CODE (XEXP (op0, 0)) == REG)
9096 mark_reg_pointer (XEXP (op0, 0), alignment);
9098 MEM_IN_STRUCT_P (op0) = 1;
9099 MEM_VOLATILE_P (op0) |= volatilep;
9101 *palign = alignment;
9110 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9113 /* Return the tree node if ARG corresponds to a string constant, or zero
9114 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9115 in bytes within the string that ARG is accessing. The type of the
9116 offset will be `sizetype'. */
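/* E.g. given the tree for "foo" + 2 (an ADDR_EXPR of the STRING_CST
   "foo" plus the constant 2), this returns the STRING_CST and sets
   *PTR_OFFSET to 2.  */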
9119 string_constant (arg, ptr_offset)
9125 if (TREE_CODE (arg) == ADDR_EXPR
9126 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9128 *ptr_offset = size_zero_node;
9129 return TREE_OPERAND (arg, 0);
9131 else if (TREE_CODE (arg) == PLUS_EXPR)
9133 tree arg0 = TREE_OPERAND (arg, 0);
9134 tree arg1 = TREE_OPERAND (arg, 1);
9139 if (TREE_CODE (arg0) == ADDR_EXPR
9140 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9142 *ptr_offset = convert (sizetype, arg1);
9143 return TREE_OPERAND (arg0, 0);
9145 else if (TREE_CODE (arg1) == ADDR_EXPR
9146 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9148 *ptr_offset = convert (sizetype, arg0);
9149 return TREE_OPERAND (arg1, 0);
9156 /* Expand code for a post- or pre- increment or decrement
9157 and return the RTX for the result.
9158 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9161 expand_increment (exp, post, ignore)
9165 register rtx op0, op1;
9166 register rtx temp, value;
9167 register tree incremented = TREE_OPERAND (exp, 0);
9168 optab this_optab = add_optab;
9170 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9171 int op0_is_copy = 0;
9172 int single_insn = 0;
9173 /* 1 means we can't store into OP0 directly,
9174 because it is a subreg narrower than a word,
9175 and we don't dare clobber the rest of the word. */
9178 /* Stabilize any component ref that might need to be
9179 evaluated more than once below. */
9181 || TREE_CODE (incremented) == BIT_FIELD_REF
9182 || (TREE_CODE (incremented) == COMPONENT_REF
9183 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9184 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9185 incremented = stabilize_reference (incremented);
9186 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9187 ones into save exprs so that they don't accidentally get evaluated
9188 more than once by the code below. */
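/* E.g. in C++ "++ ++x" the inner ++x is wrapped in a SAVE_EXPR here.  */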
9189 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9190 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9191 incremented = save_expr (incremented);
9193 /* Compute the operands as RTX.
9194 Note whether OP0 is the actual lvalue or a copy of it:
9195 I believe it is a copy iff it is a register or subreg
9196 and insns were generated in computing it. */
9198 temp = get_last_insn ();
9199 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9201 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9202 in place but instead must do sign- or zero-extension during assignment,
9203 so we copy it into a new register and let the code below use it as a copy.
9206 Note that we can safely modify this SUBREG since it is known not to be
9207 shared (it was made by the expand_expr call above). */
9209 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9212 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9216 else if (GET_CODE (op0) == SUBREG
9217 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9219 /* We cannot increment this SUBREG in place. If we are
9220 post-incrementing, get a copy of the old value. Otherwise,
9221 just mark that we cannot increment in place. */
9223 op0 = copy_to_reg (op0);
9228 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9229 && temp != get_last_insn ());
9230 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9231 EXPAND_MEMORY_USE_BAD);
9233 /* Decide whether incrementing or decrementing. */
9234 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9235 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9236 this_optab = sub_optab;
9238 /* Convert decrement by a constant into a negative increment. */
9239 if (this_optab == sub_optab
9240 && GET_CODE (op1) == CONST_INT)
9242 op1 = GEN_INT (-INTVAL (op1));
9243 this_optab = add_optab;
9246 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9247 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9249 /* For a preincrement, see if we can do this with a single instruction. */
9252 icode = (int) this_optab->handlers[(int) mode].insn_code;
9253 if (icode != (int) CODE_FOR_nothing
9254 /* Make sure that OP0 is valid for operands 0 and 1
9255 of the insn we want to queue. */
9256 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9257 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9258 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9262 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9263 then we cannot just increment OP0. We must therefore contrive to
9264 increment the original value. Then, for postincrement, we can return
9265 OP0 since it is a copy of the old value. For preincrement, expand here
9266 unless we can do it with a single insn.
9268 Likewise if storing directly into OP0 would clobber high bits
9269 we need to preserve (bad_subreg). */
9270 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9272 /* This is the easiest way to increment the value wherever it is.
9273 Problems with multiple evaluation of INCREMENTED are prevented
9274 because either (1) it is a component_ref or preincrement,
9275 in which case it was stabilized above, or (2) it is an array_ref
9276 with constant index in an array in a register, which is
9277 safe to reevaluate. */
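/* E.g. "--x", when it cannot be done in place, is simply rebuilt
   as the assignment x = x - 1 and expanded below.  */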
9278 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9279 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9280 ? MINUS_EXPR : PLUS_EXPR),
9283 TREE_OPERAND (exp, 1));
9285 while (TREE_CODE (incremented) == NOP_EXPR
9286 || TREE_CODE (incremented) == CONVERT_EXPR)
9288 newexp = convert (TREE_TYPE (incremented), newexp);
9289 incremented = TREE_OPERAND (incremented, 0);
9292 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9293 return post ? op0 : temp;
9298 /* We have a true reference to the value in OP0.
9299 If there is an insn to add or subtract in this mode, queue it.
9300 Queueing the increment insn avoids the register shuffling
9301 that often results if we must increment now and first save
9302 the old value for subsequent use. */
9304 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9305 op0 = stabilize (op0);
9308 icode = (int) this_optab->handlers[(int) mode].insn_code;
9309 if (icode != (int) CODE_FOR_nothing
9310 /* Make sure that OP0 is valid for operands 0 and 1
9311 of the insn we want to queue. */
9312 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9313 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9315 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9316 op1 = force_reg (mode, op1);
9318 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9320 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9322 rtx addr = (general_operand (XEXP (op0, 0), mode)
9323 ? force_reg (Pmode, XEXP (op0, 0))
9324 : copy_to_reg (XEXP (op0, 0)));
9327 op0 = replace_equiv_address (op0, addr);
9328 temp = force_reg (GET_MODE (op0), op0);
9329 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9330 op1 = force_reg (mode, op1);
9332 /* The increment queue is LIFO, thus we have to `queue'
9333 the instructions in reverse order. */
9334 enqueue_insn (op0, gen_move_insn (op0, temp));
9335 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9340 /* Preincrement, or we can't increment with one simple insn. */
9342 /* Save a copy of the value before inc or dec, to return it later. */
9343 temp = value = copy_to_reg (op0);
9345 /* Arrange to return the incremented value. */
9346 /* Copy the rtx because expand_binop will protect from the queue,
9347 and the results of that would be invalid for us to return
9348 if our caller does emit_queue before using our result. */
9349 temp = copy_rtx (value = op0);
9351 /* Increment however we can. */
9352 op1 = expand_binop (mode, this_optab, value, op1,
9353 current_function_check_memory_usage ? NULL_RTX : op0,
9354 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9355 /* Make sure the value is stored into OP0. */
9357 emit_move_insn (op0, op1);
9362 /* At the start of a function, record that we have no previously-pushed
9363 arguments waiting to be popped. */
9366 init_pending_stack_adjust ()
9368 pending_stack_adjust = 0;
9371 /* When exiting from a function, if safe, clear out any pending stack adjust
9372 so the adjustment won't get done.
9374 Note, if the current function calls alloca, then it must have a
9375 frame pointer regardless of the value of flag_omit_frame_pointer. */
9378 clear_pending_stack_adjust ()
9380 #ifdef EXIT_IGNORE_STACK
9382 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9383 && EXIT_IGNORE_STACK
9384 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9385 && ! flag_inline_functions)
9387 stack_pointer_delta -= pending_stack_adjust,
9388 pending_stack_adjust = 0;
9393 /* Pop any previously-pushed arguments that have not been popped yet. */
9396 do_pending_stack_adjust ()
9398 if (inhibit_defer_pop == 0)
9400 if (pending_stack_adjust != 0)
9401 adjust_stack (GEN_INT (pending_stack_adjust));
9402 pending_stack_adjust = 0;
9406 /* Expand conditional expressions. */
9408 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9409 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9413 jumpifnot (exp, label)
9417 do_jump (exp, label, NULL_RTX);
9420 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9427 do_jump (exp, NULL_RTX, label);
9430 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9431 the result is zero, or IF_TRUE_LABEL if the result is one.
9432 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9433 meaning fall through in that case.
9435 do_jump always does any pending stack adjust except when it does not
9436 actually perform a jump. An example where there is no jump
9437 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9439 This function is responsible for optimizing cases such as
9440 &&, || and comparison operators in EXP. */
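/* E.g. for "a && b" this jumps to IF_FALSE_LABEL as soon as A is
   known to be zero, so B is never evaluated in that case.  */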
9443 do_jump (exp, if_false_label, if_true_label)
9445 rtx if_false_label, if_true_label;
9447 register enum tree_code code = TREE_CODE (exp);
9448 /* Some cases need to create a label to jump to
9449 in order to properly fall through.
9450 These cases set DROP_THROUGH_LABEL nonzero. */
9451 rtx drop_through_label = 0;
9455 enum machine_mode mode;
9457 #ifdef MAX_INTEGER_COMPUTATION_MODE
9458 check_max_integer_computation_mode (exp);
9469 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9475 /* This is not true with #pragma weak */
9477 /* The address of something can never be zero. */
9479 emit_jump (if_true_label);
9484 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9485 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9486 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9487 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9490 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9492 if ((TYPE_PRECISION (TREE_TYPE (exp))
9493 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9495 case NON_LVALUE_EXPR:
9496 case REFERENCE_EXPR:
9501 /* These cannot change zero->non-zero or vice versa. */
9502 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9505 case WITH_RECORD_EXPR:
9506 /* Put the object on the placeholder list, recurse through our first
9507 operand, and pop the list. */
9508 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9510 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9511 placeholder_list = TREE_CHAIN (placeholder_list);
9515 /* This never takes fewer insns than evaluating the PLUS_EXPR followed by
9516 a test, and can take more if the test is eliminated. */
9518 /* Reduce to minus. */
9519 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9520 TREE_OPERAND (exp, 0),
9521 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9522 TREE_OPERAND (exp, 1))));
9523 /* Process as MINUS. */
9527 /* Non-zero iff operands of minus differ. */
9528 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9529 TREE_OPERAND (exp, 0),
9530 TREE_OPERAND (exp, 1)),
9531 NE, NE, if_false_label, if_true_label);
9535 /* If we are AND'ing with a small constant, do this comparison in the
9536 smallest type that fits. If the machine doesn't have comparisons
9537 that small, it will be converted back to the wider comparison.
9538 This helps if we are testing the sign bit of a narrower object.
9539 combine can't do this for us because it can't know whether a
9540 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
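/* E.g. a test of "x & 0x80" on a 32-bit int can be done as a QImode
   comparison, since 0x80 fits in the low byte.  */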
9542 if (! SLOW_BYTE_ACCESS
9543 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9544 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9545 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9546 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9547 && (type = type_for_mode (mode, 1)) != 0
9548 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9549 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9550 != CODE_FOR_nothing))
9552 do_jump (convert (type, exp), if_false_label, if_true_label);
9557 case TRUTH_NOT_EXPR:
9558 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9561 case TRUTH_ANDIF_EXPR:
9562 if (if_false_label == 0)
9563 if_false_label = drop_through_label = gen_label_rtx ();
9564 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9565 start_cleanup_deferral ();
9566 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9567 end_cleanup_deferral ();
9570 case TRUTH_ORIF_EXPR:
9571 if (if_true_label == 0)
9572 if_true_label = drop_through_label = gen_label_rtx ();
9573 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9574 start_cleanup_deferral ();
9575 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9576 end_cleanup_deferral ();
9581 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9582 preserve_temp_slots (NULL_RTX);
9586 do_pending_stack_adjust ();
9587 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9593 case ARRAY_RANGE_REF:
9595 HOST_WIDE_INT bitsize, bitpos;
9597 enum machine_mode mode;
9601 unsigned int alignment;
9603 /* Get description of this reference. We don't actually care
9604 about the underlying object here. */
9605 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9606 &unsignedp, &volatilep, &alignment);
9608 type = type_for_size (bitsize, unsignedp);
9609 if (! SLOW_BYTE_ACCESS
9610 && type != 0 && bitsize >= 0
9611 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9612 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9613 != CODE_FOR_nothing))
9615 do_jump (convert (type, exp), if_false_label, if_true_label);
9622 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9623 if (integer_onep (TREE_OPERAND (exp, 1))
9624 && integer_zerop (TREE_OPERAND (exp, 2)))
9625 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9627 else if (integer_zerop (TREE_OPERAND (exp, 1))
9628 && integer_onep (TREE_OPERAND (exp, 2)))
9629 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9633 register rtx label1 = gen_label_rtx ();
9634 drop_through_label = gen_label_rtx ();
9636 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9638 start_cleanup_deferral ();
9639 /* Now the THEN-expression. */
9640 do_jump (TREE_OPERAND (exp, 1),
9641 if_false_label ? if_false_label : drop_through_label,
9642 if_true_label ? if_true_label : drop_through_label);
9643 /* In case the do_jump just above never jumps. */
9644 do_pending_stack_adjust ();
9645 emit_label (label1);
9647 /* Now the ELSE-expression. */
9648 do_jump (TREE_OPERAND (exp, 2),
9649 if_false_label ? if_false_label : drop_through_label,
9650 if_true_label ? if_true_label : drop_through_label);
9651 end_cleanup_deferral ();
9657 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9659 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9660 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9662 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9663 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9666 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9667 fold (build (EQ_EXPR, TREE_TYPE (exp),
9668 fold (build1 (REALPART_EXPR,
9669 TREE_TYPE (inner_type),
9671 fold (build1 (REALPART_EXPR,
9672 TREE_TYPE (inner_type),
9674 fold (build (EQ_EXPR, TREE_TYPE (exp),
9675 fold (build1 (IMAGPART_EXPR,
9676 TREE_TYPE (inner_type),
9678 fold (build1 (IMAGPART_EXPR,
9679 TREE_TYPE (inner_type),
9681 if_false_label, if_true_label);
9684 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9685 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9687 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9688 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9689 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9691 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9697 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9699 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9700 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9702 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9703 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9706 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9707 fold (build (NE_EXPR, TREE_TYPE (exp),
9708 fold (build1 (REALPART_EXPR,
9709 TREE_TYPE (inner_type),
9711 fold (build1 (REALPART_EXPR,
9712 TREE_TYPE (inner_type),
9714 fold (build (NE_EXPR, TREE_TYPE (exp),
9715 fold (build1 (IMAGPART_EXPR,
9716 TREE_TYPE (inner_type),
9718 fold (build1 (IMAGPART_EXPR,
9719 TREE_TYPE (inner_type),
9721 if_false_label, if_true_label);
9724 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9725 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9727 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9728 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9729 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9731 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9736 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9737 if (GET_MODE_CLASS (mode) == MODE_INT
9738 && ! can_compare_p (LT, mode, ccp_jump))
9739 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9741 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9745 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9746 if (GET_MODE_CLASS (mode) == MODE_INT
9747 && ! can_compare_p (LE, mode, ccp_jump))
9748 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9750 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9754 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9755 if (GET_MODE_CLASS (mode) == MODE_INT
9756 && ! can_compare_p (GT, mode, ccp_jump))
9757 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9759 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9763 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9764 if (GET_MODE_CLASS (mode) == MODE_INT
9765 && ! can_compare_p (GE, mode, ccp_jump))
9766 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9768 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9771 case UNORDERED_EXPR:
9774 enum rtx_code cmp, rcmp;
9777 if (code == UNORDERED_EXPR)
9778 cmp = UNORDERED, rcmp = ORDERED;
9780 cmp = ORDERED, rcmp = UNORDERED;
9781 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9784 if (! can_compare_p (cmp, mode, ccp_jump)
9785 && (can_compare_p (rcmp, mode, ccp_jump)
9786 /* If the target doesn't provide either UNORDERED or ORDERED
9787 comparisons, canonicalize on UNORDERED for the library. */
9788 || rcmp == UNORDERED))
9792 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9794 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9799 enum rtx_code rcode1;
9800 enum tree_code tcode2;
9824 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9825 if (can_compare_p (rcode1, mode, ccp_jump))
9826 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9830 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9831 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9834 /* If the target doesn't support combined unordered
9835 compares, decompose into UNORDERED + comparison. */
9836 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9837 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9838 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9839 do_jump (exp, if_false_label, if_true_label);
9845 __builtin_expect (<test>, 0) and
9846 __builtin_expect (<test>, 1)
9848 We need to do this here, so that <test> is not converted to a SCC
9849 operation on machines that use condition code registers and COMPARE
9850 like the PowerPC, and then the jump is done based on whether the SCC
9851 operation produced a 1 or 0. */
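/* E.g. "if (__builtin_expect (x == 0, 0))" should still be expanded
   as a compare-and-branch on X, not as an SCC result that is then
   tested.  */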
9853 /* Check for a built-in function. */
9854 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9856 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9857 tree arglist = TREE_OPERAND (exp, 1);
9859 if (TREE_CODE (fndecl) == FUNCTION_DECL
9860 && DECL_BUILT_IN (fndecl)
9861 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9862 && arglist != NULL_TREE
9863 && TREE_CHAIN (arglist) != NULL_TREE)
9865 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9868 if (seq != NULL_RTX)
9875 /* fall through and generate the normal code. */
9879 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9881 /* This is not needed any more and causes poor code since it causes
9882 comparisons and tests from non-SI objects to have different code sequences. */
9884 /* Copy to register to avoid generating bad insns by cse
9885 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9886 if (!cse_not_expected && GET_CODE (temp) == MEM)
9887 temp = copy_to_reg (temp);
9889 do_pending_stack_adjust ();
9890 /* Do any postincrements in the expression that was tested. */
9893 if (GET_CODE (temp) == CONST_INT
9894 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9895 || GET_CODE (temp) == LABEL_REF)
9897 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9901 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9902 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9903 /* Note swapping the labels gives us not-equal. */
9904 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9905 else if (GET_MODE (temp) != VOIDmode)
9906 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9907 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9908 GET_MODE (temp), NULL_RTX, 0,
9909 if_false_label, if_true_label);
9914 if (drop_through_label)
9916 /* If do_jump produces code that might be jumped around,
9917 do any stack adjusts from that code, before the place
9918 where control merges in. */
9919 do_pending_stack_adjust ();
9920 emit_label (drop_through_label);
9924 /* Given a comparison expression EXP for values too wide to be compared
9925 with one insn, test the comparison and jump to the appropriate label.
9926 The code of EXP is ignored; we always test GT if SWAP is 0,
9927 and LT if SWAP is 1. */
9930 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9933 rtx if_false_label, if_true_label;
9935 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9936 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9937 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9938 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9940 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9943 /* Compare OP0 with OP1, word at a time, in mode MODE.
9944 UNSIGNEDP says to do unsigned comparison.
9945 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
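/* E.g. for DImode on a 32-bit target this compares the high-order
   words first, and looks at the low-order words only if the
   high-order words are equal.  */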
9948 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9949 enum machine_mode mode;
9952 rtx if_false_label, if_true_label;
9954 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9955 rtx drop_through_label = 0;
9958 if (! if_true_label || ! if_false_label)
9959 drop_through_label = gen_label_rtx ();
9960 if (! if_true_label)
9961 if_true_label = drop_through_label;
9962 if (! if_false_label)
9963 if_false_label = drop_through_label;
9965 /* Compare a word at a time, high order first. */
9966 for (i = 0; i < nwords; i++)
9968 rtx op0_word, op1_word;
9970 if (WORDS_BIG_ENDIAN)
9972 op0_word = operand_subword_force (op0, i, mode);
9973 op1_word = operand_subword_force (op1, i, mode);
9977 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9978 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9981 /* All but the high-order word must be compared as unsigned. */
9982 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9983 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9984 NULL_RTX, if_true_label);
9986 /* Consider lower words only if these are equal. */
9987 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9988 NULL_RTX, 0, NULL_RTX, if_false_label);
9992 emit_jump (if_false_label);
9993 if (drop_through_label)
9994 emit_label (drop_through_label);
9997 /* Given an EQ_EXPR expression EXP for values too wide to be compared
9998 with one insn, test the comparison and jump to the appropriate label. */
10001 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10003 rtx if_false_label, if_true_label;
10005 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10006 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10007 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10008 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10010 rtx drop_through_label = 0;
10012 if (! if_false_label)
10013 drop_through_label = if_false_label = gen_label_rtx ();
10015 for (i = 0; i < nwords; i++)
10016 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10017 operand_subword_force (op1, i, mode),
10018 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10019 word_mode, NULL_RTX, 0, if_false_label,
10023 emit_jump (if_true_label);
10024 if (drop_through_label)
10025 emit_label (drop_through_label);
10028 /* Jump according to whether OP0 is 0.
10029 We assume that OP0 has an integer mode that is too wide
10030 for the available compare insns. */
10033 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10035 rtx if_false_label, if_true_label;
10037 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10040 rtx drop_through_label = 0;
10042 /* The fastest way of doing this comparison on almost any machine is to
10043 "or" all the words and compare the result. If all have to be loaded
10044 from memory and this is a very wide item, it's possible this may
10045 be slower, but that's highly unlikely. */
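/* E.g. a DImode zero test on a 32-bit machine becomes a check of
   (low_word | high_word) == 0: one IOR plus a single word compare.  */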
10047 part = gen_reg_rtx (word_mode);
10048 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10049 for (i = 1; i < nwords && part != 0; i++)
10050 part = expand_binop (word_mode, ior_optab, part,
10051 operand_subword_force (op0, i, GET_MODE (op0)),
10052 part, 1, OPTAB_WIDEN);
10056 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10057 NULL_RTX, 0, if_false_label, if_true_label);
10062 /* If we couldn't do the "or" simply, do this with a series of compares. */
10063 if (! if_false_label)
10064 drop_through_label = if_false_label = gen_label_rtx ();
10066 for (i = 0; i < nwords; i++)
10067 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10068 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10069 if_false_label, NULL_RTX);
10072 emit_jump (if_true_label);
10074 if (drop_through_label)
10075 emit_label (drop_through_label);
10078 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10079 (including code to compute the values to be compared),
10080 and set (CC0) according to the result.
10081 The decision as to signed or unsigned comparison must be made by the caller.
10083 We force a stack adjustment unless there are currently
10084 things pushed on the stack that aren't yet used.
10086 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
10089 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10090 size of MODE should be used. */
10093 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10094 register rtx op0, op1;
10095 enum rtx_code code;
10097 enum machine_mode mode;
10099 unsigned int align;
10103 /* If one operand is constant, make it the second one. Only do this
10104 if the other operand is not constant as well. */
10106 if (swap_commutative_operands_p (op0, op1))
10111 code = swap_condition (code);
10114 if (flag_force_mem)
10116 op0 = force_not_mem (op0);
10117 op1 = force_not_mem (op1);
10120 do_pending_stack_adjust ();
10122 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10123 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10127 /* There's no need to do this now that combine.c can eliminate lots of
10128 sign extensions. This can be less efficient in certain cases on other
10131 /* If this is a signed equality comparison, we can do it as an
10132 unsigned comparison since zero-extension is cheaper than sign
10133 extension and comparisons with zero are done as unsigned. This is
10134 the case even on machines that can do fast sign extension, since
10135 zero-extension is easier to combine with other operations than
10136 sign-extension is. If we are comparing against a constant, we must
10137 convert it to what it would look like unsigned. */
10138 if ((code == EQ || code == NE) && ! unsignedp
10139 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10141 if (GET_CODE (op1) == CONST_INT
10142 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10143 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10148 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10150 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
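
/* Usage sketch (illustrative, not from the original source): on a
   CC0 machine a caller might write

	rtx cond = compare_from_rtx (x, y, GT, 0, SImode, NULL_RTX, 0);

   after which COND is the rtl expression (gt (cc0) (const_int 0)),
   ready to serve as the test of an if_then_else branch pattern.  */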
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
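
/* Illustration (not original source): when the caller supplies only
   IF_FALSE_LABEL for an integer comparison, the reversal above turns

	if (x > y) ; else goto if_false_label;

   into the single conditional branch

	if (x <= y) goto if_false_label;

   Floating-point modes are excluded because reversing a comparison
   is not safe in the presence of NaNs.  */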
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
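
  /* Illustration (not original source): on a typical 32-bit target,

	flag = ((x & 0x20) != 0);

     becomes roughly

	t = (unsigned) x >> 5;
	flag = t & 1;

     and for the EQ case a final `flag ^= 1' is emitted instead of
     requiring a store-flag (scc) instruction.  */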
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
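
/* The fallback above emits, schematically (illustration only, shown
   for INVERT == 0):

	target = 1;		;; assume the condition holds
	compare op0, op1
	b<cond> label		;; condition held: keep the 1
	target = 0;		;; condition failed
     label:

   with the two constants exchanged when INVERT is set.  */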
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, 0, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (type_for_size (index_bits, 0),
				index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
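
/* Illustration (not original source): for

	switch (i) { case 3: ... case 9: ... }

   MINVAL is 3 and RANGE is 6, so the casesi pattern receives the
   index rtx and those two bounds; the pattern itself performs the
   bounds check and the indexed jump through TABLE_LABEL, falling
   back to DEFAULT_LABEL when the index is out of range.  */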
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
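
  /* Illustration (not original source): with case values 3..9, INDEX
     arrives here as i - 3 and RANGE as 6, so the single unsigned test

	if ((unsigned) (i - 3) > 6) goto default_label;

     rejects both i < 3 (the subtraction wraps to a huge unsigned
     value) and i > 9, replacing two signed comparisons with one.  */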
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
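
/* Illustration (not original source): on a non-PIC target whose case
   vectors hold absolute Pmode addresses, the code above amounts to

	temp = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));
	goto *temp;

   one indexed load from the dispatch table followed by an indirect
   jump.  */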
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}