/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
30 #include "hard-reg-set.h"
33 #include "insn-config.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
41 #include "typeclass.h"
47 /* Decide whether a function's arguments should be processed
48 from first to last or from last to first.
50 They should if the stack and args grow in opposite directions, but
51 only if we have push insns. */
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
69 /* Assume that case vectors are not pc-relative. */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));
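
/* A minimal sketch (exposition only, not part of the original sources)
   of a front end installing the hook above; the function names are
   hypothetical, and a real hook would test its own tree codes.  */
#if 0
static int
example_lang_safe_from_p (target, exp)
     rtx target ATTRIBUTE_UNUSED;
     tree exp ATTRIBUTE_UNUSED;
{
  /* In this example nothing language-specific can alias TARGET,
     so every expression is reported safe.  */
  return 1;
}

static void
example_install_hook ()
{
  lang_safe_from_p = example_lang_safe_from_p;
}
#endif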
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* Don't check memory usage, since the code being emitted is itself a
   memory-usage check.  Used when current_function_check_memory_usage
   is true, to avoid infinite recursion.  */
94 static int in_check_memory_usage;
96 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace. */
97 static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};
/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
132 extern struct obstack permanent_obstack;
134 static rtx get_push_address PARAMS ((int));
136 static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
140 static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
141 struct move_by_pieces *));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
151 static rtx get_subtarget PARAMS ((rtx));
152 static int is_zeros_p PARAMS ((tree));
153 static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor PARAMS ((tree, rtx, unsigned int, int,
				       HOST_WIDE_INT));
160 static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
161 HOST_WIDE_INT, enum machine_mode,
162 tree, enum machine_mode, int,
163 unsigned int, HOST_WIDE_INT, int));
164 static enum memory_use_mode
165 get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
166 static rtx var_rtx PARAMS ((tree));
167 static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
168 static rtx expand_increment PARAMS ((tree, int, int));
169 static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
170 static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
173 static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
175 static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
177 static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
179 /* Record for each mode whether we can move a register directly to or
180 from an object of that mode in memory. If we can't, we won't try
181 to use that mode directly when accessing a field of that mode. */
183 static char direct_load[NUM_MACHINE_MODES];
184 static char direct_store[NUM_MACHINE_MODES];
186 /* If a memory-to-memory move would take MOVE_RATIO or more simple
187 move-instruction sequences, we will do a movstr or libcall instead. */
190 #if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
193 /* If we are optimizing for space (-Os), cut down the default move ratio. */
194 #define MOVE_RATIO (optimize_size ? 3 : 15)
198 /* This macro is used to determine whether move_by_pieces should be called
199 to perform a structure copy. */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
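
/* For instance (exposition only): with the default MOVE_RATIO of 15
   above (and not optimizing for size), a 16-byte copy at 32-bit
   alignment on a 32-bit target takes four SImode moves; 4 < 15, so
   MOVE_BY_PIECES_P (16, 32) is nonzero and the copy is expanded
   inline rather than through a block-move pattern or libcall.  */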
205 /* This array records the insn_code of insns to perform block moves. */
206 enum insn_code movstr_optab[NUM_MACHINE_MODES];
208 /* This array records the insn_code of insns to perform block clears. */
209 enum insn_code clrstr_optab[NUM_MACHINE_MODES];
211 /* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
217 /* This is run once per compilation to set up which modes can be used
218 directly in memory and to initialize the block move optab. */
void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
230 /* Try indexing by frame ptr and try by stack ptr.
231 It is known that on the Convex the stack ptr isn't a valid index.
232 With luck, one or the other is valid on any machine. */
233 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
234 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
236 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
237 pat = PATTERN (insn);
239 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
240 mode = (enum machine_mode) ((int) mode + 1))
245 direct_load[(int) mode] = direct_store[(int) mode] = 0;
246 PUT_MODE (mem, mode);
247 PUT_MODE (mem1, mode);
249 /* See if there is some register that can be used in this mode and
250 directly loaded or stored from memory. */
252 if (mode != VOIDmode && mode != BLKmode)
253 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
254 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
257 if (! HARD_REGNO_MODE_OK (regno, mode))
260 reg = gen_rtx_REG (mode, regno);
	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
264 if (recog (pat, insn, &num_clobbers) >= 0)
265 direct_load[(int) mode] = 1;
267 SET_SRC (pat) = mem1;
268 SET_DEST (pat) = reg;
269 if (recog (pat, insn, &num_clobbers) >= 0)
270 direct_load[(int) mode] = 1;
	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
274 if (recog (pat, insn, &num_clobbers) >= 0)
275 direct_store[(int) mode] = 1;
	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
279 if (recog (pat, insn, &num_clobbers) >= 0)
280 direct_store[(int) mode] = 1;
287 /* This is run at the start of compiling a function. */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
295 pending_stack_adjust = 0;
296 stack_pointer_delta = 0;
297 inhibit_defer_pop = 0;
299 apply_args_value = 0;
305 struct expr_status *p;
310 ggc_mark_rtx (p->x_saveregs_value);
311 ggc_mark_rtx (p->x_apply_args_value);
312 ggc_mark_rtx (p->x_forced_labels);
323 /* Small sanity check that the queue is empty at the end of a function. */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
332 /* Manage the queue of increment instructions to be output
333 for POSTINCREMENT_EXPR expressions, etc. */
335 /* Queue up to increment (or change) VAR later. BODY says how:
336 BODY should be the same thing you would pass to emit_insn
337 to increment right away. It will go to emit_insn later on.
339 The value is a QUEUED expression to be used in place of VAR
340 where you want to guarantee the pre-incrementation value of VAR. */
static rtx
enqueue_insn (var, body)
     rtx var, body;
{
346 pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
347 body, pending_chain);
348 return pending_chain;
351 /* Use protect_from_queue to convert a QUEUED expression
352 into something that you can put immediately into an instruction.
353 If the queued incrementation has not happened yet,
354 protect_from_queue returns the variable itself.
355 If the incrementation has happened, protect_from_queue returns a temp
356 that contains a copy of the old value of the variable.
358 Any time an rtx which might possibly be a QUEUED is to be put
359 into an instruction, it must be passed through protect_from_queue first.
360 QUEUED expressions are not meaningful in instructions.
362 Do not pass a value through protect_from_queue and then hold
363 on to it for a while before putting it in an instruction!
364 If the queue is flushed in between, incorrect code will result. */
rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
371 RTX_CODE code = GET_CODE (x);
373 #if 0 /* A QUEUED can hang around after the queue is forced out. */
374 /* Shortcut for most common case. */
375 if (pending_chain == 0)
381 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
382 use of autoincrement. Make a copy of the contents of the memory
383 location rather than a copy of the address, but not if the value is
384 of mode BLKmode. Don't modify X in place since it might be
386 if (code == MEM && GET_MODE (x) != BLKmode
387 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
390 rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));
394 rtx temp = gen_reg_rtx (GET_MODE (x));
396 emit_insn_before (gen_move_insn (temp, new),
401 /* Copy the address into a pseudo, so that the returned value
402 remains correct across calls to emit_queue. */
403 return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
406 /* Otherwise, recursively protect the subexpressions of all
407 the kinds of rtx's that can contain a QUEUED. */
410 rtx tem = protect_from_queue (XEXP (x, 0), 0);
411 if (tem != XEXP (x, 0))
417 else if (code == PLUS || code == MULT)
419 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
420 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
421 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
430 /* If the increment has not happened, use the variable itself. Copy it
431 into a new pseudo so that the value remains correct across calls to
433 if (QUEUED_INSN (x) == 0)
434 return copy_to_reg (QUEUED_VAR (x));
435 /* If the increment has happened and a pre-increment copy exists,
437 if (QUEUED_COPY (x) != 0)
438 return QUEUED_COPY (x);
439 /* The increment has happened but we haven't set up a pre-increment copy.
440 Set one up now, and use it. */
441 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
442 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);
}
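
/* Exposition only: the usual calling pattern.  Operands that might
   contain a QUEUED are laundered immediately before being placed in
   an insn, as convert_move does below:

	to = protect_from_queue (to, 1);
	from = protect_from_queue (from, 0);
	emit_move_insn (to, from);

   Holding the protected value across a later emit_queue call is
   unsafe, as the comment above explains.  */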
447 /* Return nonzero if X contains a QUEUED expression:
448 if it contains anything that will be altered by a queued increment.
449 We handle only combinations of MEM, PLUS, MINUS and MULT operators
450 since memory addresses generally contain only those. */
456 enum rtx_code code = GET_CODE (x);
462 return queued_subexp_p (XEXP (x, 0));
466 return (queued_subexp_p (XEXP (x, 0))
467 || queued_subexp_p (XEXP (x, 1)));
473 /* Perform all the pending incrementations. */
479 while ((p = pending_chain))
481 rtx body = QUEUED_BODY (p);
483 if (GET_CODE (body) == SEQUENCE)
485 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
486 emit_insn (QUEUED_BODY (p));
489 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
490 pending_chain = QUEUED_NEXT (p);
494 /* Copy data from FROM to TO, where the machine modes are not the same.
495 Both modes may be integer, or both may be floating.
496 UNSIGNEDP should be nonzero if FROM is an unsigned type.
497 This causes zero-extension instead of sign-extension. */
void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
504 enum machine_mode to_mode = GET_MODE (to);
505 enum machine_mode from_mode = GET_MODE (from);
506 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
507 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
511 /* rtx code for making an equivalent value. */
512 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
514 to = protect_from_queue (to, 1);
515 from = protect_from_queue (from, 0);
517 if (to_real != from_real)
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */
524 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
525 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
526 >= GET_MODE_SIZE (to_mode))
527 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
528 from = gen_lowpart (to_mode, from), from_mode = to_mode;
530 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
533 if (to_mode == from_mode
534 || (from_mode == VOIDmode && CONSTANT_P (from)))
536 emit_move_insn (to, from);
540 if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
542 if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
545 if (VECTOR_MODE_P (to_mode))
546 from = gen_rtx_SUBREG (to_mode, from, 0);
548 to = gen_rtx_SUBREG (from_mode, to, 0);
550 emit_move_insn (to, from);
554 if (to_real != from_real)
561 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
563 /* Try converting directly if the insn is supported. */
564 if ((code = can_extend_p (to_mode, from_mode, 0))
567 emit_unop_insn (code, to, from, UNKNOWN);
572 #ifdef HAVE_trunchfqf2
573 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
575 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
579 #ifdef HAVE_trunctqfqf2
580 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
582 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
586 #ifdef HAVE_truncsfqf2
587 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
589 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
593 #ifdef HAVE_truncdfqf2
594 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
596 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
600 #ifdef HAVE_truncxfqf2
601 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
603 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
607 #ifdef HAVE_trunctfqf2
608 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
610 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
615 #ifdef HAVE_trunctqfhf2
616 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
618 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
622 #ifdef HAVE_truncsfhf2
623 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
625 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
629 #ifdef HAVE_truncdfhf2
630 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
632 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
636 #ifdef HAVE_truncxfhf2
637 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
639 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
643 #ifdef HAVE_trunctfhf2
644 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
646 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
651 #ifdef HAVE_truncsftqf2
652 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
654 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
658 #ifdef HAVE_truncdftqf2
659 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
661 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
665 #ifdef HAVE_truncxftqf2
666 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
668 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
672 #ifdef HAVE_trunctftqf2
673 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
675 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
680 #ifdef HAVE_truncdfsf2
681 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
683 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
687 #ifdef HAVE_truncxfsf2
688 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
690 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
694 #ifdef HAVE_trunctfsf2
695 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
697 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
701 #ifdef HAVE_truncxfdf2
702 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
704 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
708 #ifdef HAVE_trunctfdf2
709 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
711 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
723 libcall = extendsfdf2_libfunc;
727 libcall = extendsfxf2_libfunc;
731 libcall = extendsftf2_libfunc;
743 libcall = truncdfsf2_libfunc;
747 libcall = extenddfxf2_libfunc;
751 libcall = extenddftf2_libfunc;
763 libcall = truncxfsf2_libfunc;
767 libcall = truncxfdf2_libfunc;
779 libcall = trunctfsf2_libfunc;
783 libcall = trunctfdf2_libfunc;
      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();
800 value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
802 insns = get_insns ();
804 emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
809 /* Now both modes are integers. */
811 /* Handle expanding beyond a word. */
812 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
813 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
820 enum machine_mode lowpart_mode;
821 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
823 /* Try converting directly if the insn is supported. */
824 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
827 /* If FROM is a SUBREG, put it into a register. Do this
828 so that we always generate the same set of insns for
829 better cse'ing; if an intermediate assignment occurred,
830 we won't be doing the operation directly on the SUBREG. */
831 if (optimize > 0 && GET_CODE (from) == SUBREG)
832 from = force_reg (from_mode, from);
833 emit_unop_insn (code, to, from, equiv_code);
836 /* Next, try converting via full word. */
837 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
838 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
839 != CODE_FOR_nothing))
841 if (GET_CODE (to) == REG)
842 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
843 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
844 emit_unop_insn (code, to,
845 gen_lowpart (word_mode, to), equiv_code);
849 /* No special multiword conversion insn; do it by hand. */
852 /* Since we will turn this into a no conflict block, we must ensure
853 that the source does not overlap the target. */
855 if (reg_overlap_mentioned_p (to, from))
856 from = force_reg (from_mode, from);
858 /* Get a copy of FROM widened to a word, if necessary. */
859 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
860 lowpart_mode = word_mode;
862 lowpart_mode = from_mode;
864 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
866 lowpart = gen_lowpart (lowpart_mode, to);
867 emit_move_insn (lowpart, lowfrom);
869 /* Compute the value to put in each remaining word. */
	  if (unsignedp)
	    fill_value = const0_rtx;
876 && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
877 && STORE_FLAG_VALUE == -1)
879 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
881 fill_value = gen_reg_rtx (word_mode);
882 emit_insn (gen_slt (fill_value));
888 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
889 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
891 fill_value = convert_to_mode (word_mode, fill_value, 1);
895 /* Fill the remaining words. */
896 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
898 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
899 rtx subword = operand_subword (to, index, 1, to_mode);
904 if (fill_value != subword)
905 emit_move_insn (subword, fill_value);
908 insns = get_insns ();
911 emit_no_conflict_block (insns, to, from, NULL_RTX,
912 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
916 /* Truncating multi-word to a word or less. */
917 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
918 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
920 if (!((GET_CODE (from) == MEM
921 && ! MEM_VOLATILE_P (from)
922 && direct_load[(int) to_mode]
923 && ! mode_dependent_address_p (XEXP (from, 0)))
924 || GET_CODE (from) == REG
925 || GET_CODE (from) == SUBREG))
926 from = force_reg (from_mode, from);
927 convert_move (to, gen_lowpart (word_mode, from), 0);
931 /* Handle pointer conversion. */ /* SPEE 900220. */
932 if (to_mode == PQImode)
934 if (from_mode != QImode)
935 from = convert_to_mode (QImode, from, unsignedp);
937 #ifdef HAVE_truncqipqi2
938 if (HAVE_truncqipqi2)
940 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
943 #endif /* HAVE_truncqipqi2 */
947 if (from_mode == PQImode)
949 if (to_mode != QImode)
951 from = convert_to_mode (QImode, from, unsignedp);
956 #ifdef HAVE_extendpqiqi2
957 if (HAVE_extendpqiqi2)
959 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
962 #endif /* HAVE_extendpqiqi2 */
967 if (to_mode == PSImode)
969 if (from_mode != SImode)
970 from = convert_to_mode (SImode, from, unsignedp);
972 #ifdef HAVE_truncsipsi2
973 if (HAVE_truncsipsi2)
975 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
978 #endif /* HAVE_truncsipsi2 */
982 if (from_mode == PSImode)
984 if (to_mode != SImode)
986 from = convert_to_mode (SImode, from, unsignedp);
991 #ifdef HAVE_extendpsisi2
992 if (! unsignedp && HAVE_extendpsisi2)
994 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
997 #endif /* HAVE_extendpsisi2 */
998 #ifdef HAVE_zero_extendpsisi2
999 if (unsignedp && HAVE_zero_extendpsisi2)
1001 emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
1004 #endif /* HAVE_zero_extendpsisi2 */
1009 if (to_mode == PDImode)
1011 if (from_mode != DImode)
1012 from = convert_to_mode (DImode, from, unsignedp);
1014 #ifdef HAVE_truncdipdi2
1015 if (HAVE_truncdipdi2)
1017 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1020 #endif /* HAVE_truncdipdi2 */
1024 if (from_mode == PDImode)
1026 if (to_mode != DImode)
1028 from = convert_to_mode (DImode, from, unsignedp);
1033 #ifdef HAVE_extendpdidi2
1034 if (HAVE_extendpdidi2)
1036 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1039 #endif /* HAVE_extendpdidi2 */
1044 /* Now follow all the conversions between integers
1045 no more than a word long. */
1047 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1048 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1049 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1050 GET_MODE_BITSIZE (from_mode)))
1052 if (!((GET_CODE (from) == MEM
1053 && ! MEM_VOLATILE_P (from)
1054 && direct_load[(int) to_mode]
1055 && ! mode_dependent_address_p (XEXP (from, 0)))
1056 || GET_CODE (from) == REG
1057 || GET_CODE (from) == SUBREG))
1058 from = force_reg (from_mode, from);
1059 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1060 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1061 from = copy_to_reg (from);
1062 emit_move_insn (to, gen_lowpart (to_mode, from));
1066 /* Handle extension. */
1067 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1069 /* Convert directly if that works. */
1070 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1071 != CODE_FOR_nothing)
1073 emit_unop_insn (code, to, from, equiv_code);
1078 enum machine_mode intermediate;
1082 /* Search for a mode to convert via. */
1083 for (intermediate = from_mode; intermediate != VOIDmode;
1084 intermediate = GET_MODE_WIDER_MODE (intermediate))
1085 if (((can_extend_p (to_mode, intermediate, unsignedp)
1086 != CODE_FOR_nothing)
1087 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1088 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
1089 GET_MODE_BITSIZE (intermediate))))
1090 && (can_extend_p (intermediate, from_mode, unsignedp)
1091 != CODE_FOR_nothing))
1093 convert_move (to, convert_to_mode (intermediate, from,
1094 unsignedp), unsignedp);
1098 /* No suitable intermediate mode.
1099 Generate what we need with shifts. */
1100 shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
1101 - GET_MODE_BITSIZE (from_mode), 0);
1102 from = gen_lowpart (to_mode, force_reg (from_mode, from));
1103 tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
1105 tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
1108 emit_move_insn (to, tmp);
1113 /* Support special truncate insns for certain modes. */
1115 if (from_mode == DImode && to_mode == SImode)
1117 #ifdef HAVE_truncdisi2
1118 if (HAVE_truncdisi2)
1120 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1124 convert_move (to, force_reg (from_mode, from), unsignedp);
1128 if (from_mode == DImode && to_mode == HImode)
1130 #ifdef HAVE_truncdihi2
1131 if (HAVE_truncdihi2)
1133 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1137 convert_move (to, force_reg (from_mode, from), unsignedp);
1141 if (from_mode == DImode && to_mode == QImode)
1143 #ifdef HAVE_truncdiqi2
1144 if (HAVE_truncdiqi2)
1146 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1150 convert_move (to, force_reg (from_mode, from), unsignedp);
1154 if (from_mode == SImode && to_mode == HImode)
1156 #ifdef HAVE_truncsihi2
1157 if (HAVE_truncsihi2)
1159 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1163 convert_move (to, force_reg (from_mode, from), unsignedp);
1167 if (from_mode == SImode && to_mode == QImode)
1169 #ifdef HAVE_truncsiqi2
1170 if (HAVE_truncsiqi2)
1172 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1176 convert_move (to, force_reg (from_mode, from), unsignedp);
1180 if (from_mode == HImode && to_mode == QImode)
1182 #ifdef HAVE_trunchiqi2
1183 if (HAVE_trunchiqi2)
1185 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1189 convert_move (to, force_reg (from_mode, from), unsignedp);
1193 if (from_mode == TImode && to_mode == DImode)
1195 #ifdef HAVE_trunctidi2
1196 if (HAVE_trunctidi2)
1198 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1202 convert_move (to, force_reg (from_mode, from), unsignedp);
1206 if (from_mode == TImode && to_mode == SImode)
1208 #ifdef HAVE_trunctisi2
1209 if (HAVE_trunctisi2)
1211 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1215 convert_move (to, force_reg (from_mode, from), unsignedp);
1219 if (from_mode == TImode && to_mode == HImode)
1221 #ifdef HAVE_trunctihi2
1222 if (HAVE_trunctihi2)
1224 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1228 convert_move (to, force_reg (from_mode, from), unsignedp);
1232 if (from_mode == TImode && to_mode == QImode)
1234 #ifdef HAVE_trunctiqi2
1235 if (HAVE_trunctiqi2)
1237 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1241 convert_move (to, force_reg (from_mode, from), unsignedp);
1245 /* Handle truncation of volatile memrefs, and so on;
1246 the things that couldn't be truncated directly,
1247 and for which there was no special instruction. */
1248 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1250 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1251 emit_move_insn (to, temp);
  /* Mode combination is not recognized.  */
  abort ();
}
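
/* Exposition only: a typical use of convert_move, widening an SImode
   value into a fresh DImode register with zero-extension:

	rtx src = gen_reg_rtx (SImode);
	rtx dst = gen_reg_rtx (DImode);
	convert_move (dst, src, 1);

   Depending on the target this emits a zero-extend insn, a word-by-word
   expansion, or a libcall.  */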
1259 /* Return an rtx for a value that would result
1260 from converting X to mode MODE.
1261 Both X and MODE may be floating, or both integer.
1262 UNSIGNEDP is nonzero if X is an unsigned value.
1263 This can be done by referring to a part of X in place
1264 or by copying to a new temporary with conversion.
1266 This function *must not* call protect_from_queue
1267 except when putting X into an insn (in which case convert_move does it). */
rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
1275 return convert_modes (mode, VOIDmode, x, unsignedp);
1278 /* Return an rtx for a value that would result
1279 from converting X from mode OLDMODE to mode MODE.
1280 Both modes may be floating, or both integer.
1281 UNSIGNEDP is nonzero if X is an unsigned value.
1283 This can be done by referring to a part of X in place
1284 or by copying to a new temporary with conversion.
1286 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1288 This function *must not* call protect_from_queue
1289 except when putting X into an insn (in which case convert_move does it). */
rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;
1299 /* If FROM is a SUBREG that indicates that we have already done at least
1300 the required extension, strip it. */
1302 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1303 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1304 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1305 x = gen_lowpart (mode, x);
1307 if (GET_MODE (x) != VOIDmode)
1308 oldmode = GET_MODE (x);
1310 if (mode == oldmode)
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */
1319 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1320 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1321 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1323 HOST_WIDE_INT val = INTVAL (x);
1325 if (oldmode != VOIDmode
1326 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1328 int width = GET_MODE_BITSIZE (oldmode);
1330 /* We need to zero extend VAL. */
1331 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1334 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1337 /* We can do this with a gen_lowpart if both desired and current modes
1338 are integer, and this is either a constant integer, a register, or a
1339 non-volatile MEM. Except for the constant case where MODE is no
1340 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
1342 if ((GET_CODE (x) == CONST_INT
1343 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
1344 || (GET_MODE_CLASS (mode) == MODE_INT
1345 && GET_MODE_CLASS (oldmode) == MODE_INT
1346 && (GET_CODE (x) == CONST_DOUBLE
1347 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
1348 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1349 && direct_load[(int) mode])
1350 || (GET_CODE (x) == REG
1351 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1352 GET_MODE_BITSIZE (GET_MODE (x)))))))))
1354 /* ?? If we don't know OLDMODE, we have to assume here that
1355 X does not need sign- or zero-extension. This may not be
1356 the case, but it's the best we can do. */
1357 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1358 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1360 HOST_WIDE_INT val = INTVAL (x);
1361 int width = GET_MODE_BITSIZE (oldmode);
	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;
1370 return GEN_INT (trunc_int_for_mode (val, mode));
1373 return gen_lowpart (mode, x);
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
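
/* Worked example (exposition only): because a CONST_INT carries
   VOIDmode, convert_modes can truncate it directly, e.g.

	rtx x = convert_modes (QImode, SImode, GEN_INT (300), 1);

   yields (const_int 44), i.e. 300 reduced to 8 bits.  */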
/* This macro determines the largest unit size that move_by_pieces
   can use.  */
1384 /* MOVE_MAX_PIECES is the number of bytes at a time which we can
1385 move efficiently, as opposed to MOVE_MAX which is the maximum
1386 number of bytes we can move with a single instruction. */
#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif
1392 /* Generate several move instructions to copy LEN bytes from block FROM to
1393 block TO. (These are MEM rtx's with BLKmode). The caller must pass FROM
1394 and TO through protect_from_queue before calling.
1396 If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1397 used to push FROM to the stack.
1399 ALIGN is maximum alignment we can assume. */
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
1407 struct move_by_pieces data;
1408 rtx to_addr, from_addr = XEXP (from, 0);
1409 unsigned int max_size = MOVE_MAX_PIECES + 1;
1410 enum machine_mode mode = VOIDmode, tmode;
1411 enum insn_code icode;
1414 data.from_addr = from_addr;
1417 to_addr = XEXP (to, 0);
1420 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1421 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1423 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1430 #ifdef STACK_GROWS_DOWNWARD
1436 data.to_addr = to_addr;
1439 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1440 || GET_CODE (from_addr) == POST_INC
1441 || GET_CODE (from_addr) == POST_DEC);
1443 data.explicit_inc_from = 0;
1444 data.explicit_inc_to = 0;
  if (data.reverse)
    data.offset = len;
1448 /* If copying requires more than two move insns,
1449 copy addresses to registers (to make displacements shorter)
1450 and use post-increment if available. */
1451 if (!(data.autinc_from && data.autinc_to)
1452 && move_by_pieces_ninsns (len, align) > 2)
1454 /* Find the mode of the largest move... */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;
1460 if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
1462 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1463 data.autinc_from = 1;
1464 data.explicit_inc_from = -1;
1466 if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
1468 data.from_addr = copy_addr_to_reg (from_addr);
1469 data.autinc_from = 1;
1470 data.explicit_inc_from = 1;
1472 if (!data.autinc_from && CONSTANT_P (from_addr))
1473 data.from_addr = copy_addr_to_reg (from_addr);
1474 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
1476 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1478 data.explicit_inc_to = -1;
1480 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
1482 data.to_addr = copy_addr_to_reg (to_addr);
1484 data.explicit_inc_to = 1;
1486 if (!data.autinc_to && CONSTANT_P (to_addr))
1487 data.to_addr = copy_addr_to_reg (to_addr);
1490 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1491 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1492 align = MOVE_MAX * BITS_PER_UNIT;
1494 /* First move what we can in the largest integer mode, then go to
1495 successively smaller modes. */
1497 while (max_size > 1)
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;
1504 if (mode == VOIDmode)
1507 icode = mov_optab->handlers[(int) mode].insn_code;
1508 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1509 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1511 max_size = GET_MODE_SIZE (mode);
1514 /* The code above should have handled everything. */
1519 /* Return number of insns required to move L bytes by pieces.
1520 ALIGN (in bits) is maximum alignment we can assume. */
1522 static unsigned HOST_WIDE_INT
1523 move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
1527 unsigned HOST_WIDE_INT n_insns = 0;
1528 unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;
1530 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
1531 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
1532 align = MOVE_MAX * BITS_PER_UNIT;
1534 while (max_size > 1)
1536 enum machine_mode mode = VOIDmode, tmode;
1537 enum insn_code icode;
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;
1544 if (mode == VOIDmode)
1547 icode = mov_optab->handlers[(int) mode].insn_code;
1548 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1549 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
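
/* Worked example (exposition only): for l = 10 bytes at 32-bit
   alignment on a 32-bit target, the loop counts two SImode moves and
   one HImode move, so move_by_pieces_ninsns returns 3.  */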
1559 /* Subroutine of move_by_pieces. Move as many bytes as appropriate
1560 with move instructions for mode MODE. GENFUN is the gen_... function
1561 to make a move insn for that mode. DATA has all the other info. */
static void
move_by_pieces_1 (genfun, mode, data)
1565 rtx (*genfun) PARAMS ((rtx, ...));
1566 enum machine_mode mode;
     struct move_by_pieces *data;
{
1569 unsigned int size = GET_MODE_SIZE (mode);
1570 rtx to1 = NULL_RTX, from1;
1572 while (data->len >= size)
1575 data->offset -= size;
1579 if (data->autinc_to)
1581 to1 = replace_equiv_address (data->to, data->to_addr);
1582 to1 = adjust_address (to1, mode, 0);
1585 to1 = adjust_address (data->to, mode, data->offset);
1588 if (data->autinc_from)
1590 from1 = replace_equiv_address (data->from, data->from_addr);
1591 from1 = adjust_address (from1, mode, 0);
1594 from1 = adjust_address (data->from, mode, data->offset);
1596 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1597 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1598 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1599 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1602 emit_insn ((*genfun) (to1, from1));
1605 #ifdef PUSH_ROUNDING
1606 emit_single_push_insn (mode, from1, NULL);
1612 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1613 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1614 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1615 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1617 if (! data->reverse)
1618 data->offset += size;
1624 /* Emit code to move a block Y to a block X.
1625 This may be done with string-move instructions,
1626 with multiple scalar move instructions, or with a library call.
   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     unsigned int align;
{
  rtx retval = 0;
1643 #ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
1648 if (GET_MODE (x) != BLKmode)
1651 if (GET_MODE (y) != BLKmode)
1654 x = protect_from_queue (x, 1);
1655 y = protect_from_queue (y, 0);
1656 size = protect_from_queue (size, 0);
1658 if (GET_CODE (x) != MEM)
1660 if (GET_CODE (y) != MEM)
1665 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1666 move_by_pieces (x, y, INTVAL (size), align);
1669 /* Try the most limited insn first, because there's no point
1670 including more than one in the machine description unless
1671 the more limited one has some advantage. */
1673 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1674 enum machine_mode mode;
1676 /* Since this is a move insn, we don't care about volatility. */
1679 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1680 mode = GET_MODE_WIDER_MODE (mode))
1682 enum insn_code code = movstr_optab[(int) mode];
1683 insn_operand_predicate_fn pred;
1685 if (code != CODE_FOR_nothing
1686 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1687 here because if SIZE is less than the mode mask, as it is
1688 returned by the macro, it will definitely be less than the
1689 actual mode mask. */
1690 && ((GET_CODE (size) == CONST_INT
1691 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1692 <= (GET_MODE_MASK (mode) >> 1)))
1693 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1694 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1695 || (*pred) (x, BLKmode))
1696 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1697 || (*pred) (y, BLKmode))
1698 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1699 || (*pred) (opalign, VOIDmode)))
1702 rtx last = get_last_insn ();
1705 op2 = convert_to_mode (mode, size, 1);
1706 pred = insn_data[(int) code].operand[2].predicate;
1707 if (pred != 0 && ! (*pred) (op2, mode))
1708 op2 = copy_to_mode_reg (mode, op2);
1710 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1718 delete_insns_since (last);
1724 /* X, Y, or SIZE may have been passed through protect_from_queue.
1726 It is unsafe to save the value generated by protect_from_queue
1727 and reuse it later. Consider what happens if emit_queue is
1728 called before the return value from protect_from_queue is used.
1730 Expansion of the CALL_EXPR below will call emit_queue before
1731 we are finished emitting RTL for argument setup. So if we are
1732 not careful we could get the wrong value for an argument.
1734 To avoid this problem we go ahead and emit code to copy X, Y &
1735 SIZE into new pseudos. We can then place those new pseudos
1736 into an RTL_EXPR and use them later, even after a call to
1739 Note this is not strictly needed for library calls since they
1740 do not call emit_queue before loading their arguments. However,
1741 we may need to have library calls call emit_queue in the future
1742 since failing to do so could cause problems for targets which
1743 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1744 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1745 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1747 #ifdef TARGET_MEM_FUNCTIONS
1748 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1750 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1751 TREE_UNSIGNED (integer_type_node));
1752 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
1755 #ifdef TARGET_MEM_FUNCTIONS
1756 /* It is incorrect to use the libcall calling conventions to call
1757 memcpy in this context.
1759 This could be a user call to memcpy and the user may wish to
1760 examine the return value from memcpy.
1762 For targets where libcalls and normal calls have different conventions
1763 for returning pointers, we could end up generating incorrect code.
1765 So instead of using a libcall sequence we build up a suitable
1766 CALL_EXPR and expand the call in the normal fashion. */
1767 if (fn == NULL_TREE)
      /* This was copied from except.c; I don't know whether all of it
	 is necessary in this context or not.  */
1773 fn = get_identifier ("memcpy");
1774 fntype = build_pointer_type (void_type_node);
1775 fntype = build_function_type (fntype, NULL_TREE);
1776 fn = build_decl (FUNCTION_DECL, fn, fntype);
1777 ggc_add_tree_root (&fn, 1);
1778 DECL_EXTERNAL (fn) = 1;
1779 TREE_PUBLIC (fn) = 1;
1780 DECL_ARTIFICIAL (fn) = 1;
1781 TREE_NOTHROW (fn) = 1;
1782 make_decl_rtl (fn, NULL);
1783 assemble_external (fn);
1786 /* We need to make an argument list for the function call.
1788 memcpy has three arguments, the first two are void * addresses and
1789 the last is a size_t byte count for the copy. */
      arg_list
	= build_tree_list (NULL_TREE,
1792 make_tree (build_pointer_type (void_type_node), x));
1793 TREE_CHAIN (arg_list)
1794 = build_tree_list (NULL_TREE,
1795 make_tree (build_pointer_type (void_type_node), y));
1796 TREE_CHAIN (TREE_CHAIN (arg_list))
1797 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1798 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1800 /* Now we have to build up the CALL_EXPR itself. */
1801 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1802 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1803 call_expr, arg_list, NULL_TREE);
1804 TREE_SIDE_EFFECTS (call_expr) = 1;
1806 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, LCT_NORMAL,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
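
/* Exposition only: a caller copies a 64-byte BLKmode object, letting
   emit_block_move choose between inline moves, a movstr pattern, and
   a library call (X and Y are BLKmode MEMs; alignment is in bits):

	emit_block_move (x, y, GEN_INT (64), 32);
*/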
1819 /* Copy all or part of a value X into registers starting at REGNO.
1820 The number of registers to be filled is NREGS. */
void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
1830 #ifdef HAVE_load_multiple
1838 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1839 x = validize_mem (force_const_mem (mode, x));
1841 /* See if the machine can do this with a load multiple insn. */
1842 #ifdef HAVE_load_multiple
1843 if (HAVE_load_multiple)
1845 last = get_last_insn ();
1846 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1854 delete_insns_since (last);
1858 for (i = 0; i < nregs; i++)
1859 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1860 operand_subword_force (x, i, mode));
1863 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1864 The number of registers to be filled is NREGS. SIZE indicates the number
1865 of bytes in the object X. */
void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
1875 #ifdef HAVE_store_multiple
1879 enum machine_mode mode;
1884 /* If SIZE is that of a mode no bigger than a word, just use that
1885 mode's store operation. */
1886 if (size <= UNITS_PER_WORD
1887 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1889 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
1893 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1894 to the left before storing to memory. Note that the previous test
1895 doesn't handle all cases (e.g. SIZE == 3). */
1896 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1898 rtx tem = operand_subword (x, 0, 1, BLKmode);
1904 shift = expand_shift (LSHIFT_EXPR, word_mode,
1905 gen_rtx_REG (word_mode, regno),
1906 build_int_2 ((UNITS_PER_WORD - size)
1907 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1908 emit_move_insn (tem, shift);
1912 /* See if the machine can do this with a store multiple insn. */
1913 #ifdef HAVE_store_multiple
1914 if (HAVE_store_multiple)
1916 last = get_last_insn ();
1917 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1925 delete_insns_since (last);
1929 for (i = 0; i < nregs; i++)
1931 rtx tem = operand_subword (x, i, 1, BLKmode);
1936 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   will need to be defined.  */
void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, src;
  int start, i;
1960 if (GET_CODE (dst) != PARALLEL)
1963 /* Check for a NULL entry, used to indicate that the parameter goes
1964 both on the stack and in registers. */
1965 if (XEXP (XVECEXP (dst, 0, 0), 0))
1970 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1972 /* Process the pieces. */
1973 for (i = start; i < XVECLEN (dst, 0); i++)
1975 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1976 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1977 unsigned int bytelen = GET_MODE_SIZE (mode);
1980 /* Handle trailing fragments that run over the size of the struct. */
1981 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1983 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1984 bytelen = ssize - bytepos;
1989 /* If we won't be loading directly from memory, protect the real source
1990 from strange tricks we might play; but make sure that the source can
1991 be loaded directly into the destination. */
1993 if (GET_CODE (orig_src) != MEM
1994 && (!CONSTANT_P (orig_src)
1995 || (GET_MODE (orig_src) != mode
1996 && GET_MODE (orig_src) != VOIDmode)))
1998 if (GET_MODE (orig_src) == VOIDmode)
1999 src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));
2002 emit_move_insn (src, orig_src);
2005 /* Optimize the access just a bit. */
2006 if (GET_CODE (src) == MEM
2007 && align >= GET_MODE_ALIGNMENT (mode)
2008 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2009 && bytelen == GET_MODE_SIZE (mode))
2011 tmps[i] = gen_reg_rtx (mode);
2012 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2014 else if (GET_CODE (src) == CONCAT)
	    if (bytepos == 0
		&& bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
2018 tmps[i] = XEXP (src, 0);
2019 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
2020 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
2021 tmps[i] = XEXP (src, 1);
2025 else if (CONSTANT_P (src)
2026 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	  tmps[i] = src;
	else
	  tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2030 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2031 mode, mode, align, ssize);
2033 if (BYTES_BIG_ENDIAN && shift)
2034 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2035 tmps[i], 0, OPTAB_WIDEN);
2040 /* Copy the extracted pieces into the proper (probable) hard regs. */
2041 for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
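
/* Exposition only: a PARALLEL destination such as

	(parallel [(expr_list (reg:DI 4) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   describes a 16-byte value whose first eight bytes live in register 4
   and whose second eight bytes live in register 5; emit_group_load
   extracts each piece of ORIG_SRC into the corresponding register.  */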
2045 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2046 registers represented by a PARALLEL. SSIZE represents the total size of
2047 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;
2058 if (GET_CODE (src) != PARALLEL)
2061 /* Check for a NULL entry, used to indicate that the parameter goes
2062 both on the stack and in registers. */
2063 if (XEXP (XVECEXP (src, 0, 0), 0))
2068 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2070 /* Copy the (probable) hard regs into pseudos. */
2071 for (i = start; i < XVECLEN (src, 0); i++)
2073 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2074 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2075 emit_move_insn (tmps[i], reg);
2079 /* If we won't be storing directly into memory, protect the real destination
2080 from strange tricks we might play. */
2082 if (GET_CODE (dst) == PARALLEL)
2086 /* We can get a PARALLEL dst if there is a conditional expression in
2087 a return statement. In that case, the dst and src are the same,
2088 so no action is necessary. */
2089 if (rtx_equal_p (dst, src))
2092 /* It is unclear if we can ever reach here, but we may as well handle
2093 it. Allocate a temporary, and split this into a store/load to/from
2096 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2097 emit_group_store (temp, src, ssize, align);
2098 emit_group_load (dst, temp, ssize, align);
2101 else if (GET_CODE (dst) != MEM)
2103 dst = gen_reg_rtx (GET_MODE (orig_dst));
2104 /* Make life a bit easier for combine. */
2105 emit_move_insn (dst, const0_rtx);
2108 /* Process the pieces. */
2109 for (i = start; i < XVECLEN (src, 0); i++)
2111 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2112 enum machine_mode mode = GET_MODE (tmps[i]);
2113 unsigned int bytelen = GET_MODE_SIZE (mode);
2115 /* Handle trailing fragments that run over the size of the struct. */
2116 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2118 if (BYTES_BIG_ENDIAN)
2120 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2121 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2122 tmps[i], 0, OPTAB_WIDEN);
2124 bytelen = ssize - bytepos;
2127 /* Optimize the access just a bit. */
2128 if (GET_CODE (dst) == MEM
2129 && align >= GET_MODE_ALIGNMENT (mode)
2130 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2131 && bytelen == GET_MODE_SIZE (mode))
2132 emit_move_insn (adjust_address (dst, mode, bytepos), tmps[i]);
2134 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2135 mode, tmps[i], align, ssize);
2140 /* Copy from the pseudo into the (probable) hard reg. */
2141 if (GET_CODE (dst) == REG)
2142 emit_move_insn (orig_dst, dst);
2145 /* Generate code to copy a BLKmode object of TYPE out of a
2146 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2147 is null, a stack temporary is created. TGTBLK is returned.
2149 The primary purpose of this routine is to handle functions
2150 that return BLKmode structures in registers. Some machines
2151 (the PA for example) want to return all small structures
2152 in registers regardless of the structure's alignment. */
rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }
2174 /* This code assumes srcreg is at least a full word. If it isn't,
2175 copy it into a new pseudo which is a full word. */
2176 if (GET_MODE (srcreg) != BLKmode
2177 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2178 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2180 /* Structures whose size is not a multiple of a word are aligned
2181 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2182 machine, this means we must skip the empty high order bytes when
2183 calculating the bit offset. */
2184 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2185 big_endian_correction
2186 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
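
  /* For instance (exposition only): an 11-byte structure on a 32-bit
     big-endian target has bytes % UNITS_PER_WORD == 3, giving a
     correction of 32 - 24 = 8 bits.  */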
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
2193 for (bitpos = 0, xbitpos = big_endian_correction;
2194 bitpos < bytes * BITS_PER_UNIT;
2195 bitpos += bitsize, xbitpos += bitsize)
2197 /* We need a new source operand each time xbitpos is on a
2198 word boundary and when xbitpos == big_endian_correction
2199 (the first time through). */
2200 if (xbitpos % BITS_PER_WORD == 0
2201 || xbitpos == big_endian_correction)
2202 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2205 /* We need a new destination operand each time bitpos is on
2207 if (bitpos % BITS_PER_WORD == 0)
2208 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
2212 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2213 extract_bit_field (src, bitsize,
2214 xbitpos % BITS_PER_WORD, 1,
2215 NULL_RTX, word_mode, word_mode,
2216 bitsize, BITS_PER_WORD),
2217 bitsize, BITS_PER_WORD);
2223 /* Add a USE expression for REG to the (possibly empty) list pointed
2224 to by CALL_FUSAGE. REG must denote a hard register. */
void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
2230 if (GET_CODE (reg) != REG
2231 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}
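
/* Exposition only: recording that a call reads its argument registers,
   e.g. for an argument passed in hard register 0:

	rtx call_fusage = NULL_RTX;
	use_reg (&call_fusage, gen_rtx_REG (SImode, 0));

   CALL_FUSAGE is later attached to the CALL_INSN.  */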
2239 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2240 starting at REGNO. All of these registers must be hard registers. */
void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;
2250 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2253 for (i = 0; i < nregs; i++)
2254 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2257 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2258 PARALLEL REGS. This is for calls that pass values in multiple
2259 non-contiguous locations. The Irix 6 ABI has examples of this. */
void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;
2268 for (i = 0; i < XVECLEN (regs, 0); i++)
2270 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2272 /* A NULL entry means the parameter goes both on the stack and in
2273 registers. This can also be a MEM for targets that pass values
2274 partially on the stack and partially in registers. */
2275 if (reg != 0 && GET_CODE (reg) == REG)
2276 use_reg (call_fusage, reg);
/* Determine whether the LEN bytes generated by CONSTFUN can be
   stored to memory using several move instructions.  CONSTFUNDATA is
   a pointer which will be passed as argument in every CONSTFUN call.
   ALIGN is maximum alignment we can assume.  Return nonzero if a
   call to store_by_pieces should succeed.  */

int
can_store_by_pieces (len, constfun, constfundata, align)
     unsigned HOST_WIDE_INT len;
     rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
     PTR constfundata;
     unsigned int align;
{
2288 unsigned HOST_WIDE_INT max_size, l;
2289 HOST_WIDE_INT offset = 0;
2290 enum machine_mode mode, tmode;
2291 enum insn_code icode;
2295 if (! MOVE_BY_PIECES_P (len, align))
2298 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2299 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2300 align = MOVE_MAX * BITS_PER_UNIT;
2302 /* We would first store what we can in the largest integer mode, then go to
2303 successively smaller modes. */
2306 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2311 max_size = MOVE_MAX_PIECES + 1;
2312 while (max_size > 1)
2314 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2315 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2316 if (GET_MODE_SIZE (tmode) < max_size)
2319 if (mode == VOIDmode)
2322 icode = mov_optab->handlers[(int) mode].insn_code;
2323 if (icode != CODE_FOR_nothing
2324 && align >= GET_MODE_ALIGNMENT (mode))
2326 unsigned int size = GET_MODE_SIZE (mode);
2333 cst = (*constfun) (constfundata, offset, mode);
2334 if (!LEGITIMATE_CONSTANT_P (cst))
2344 max_size = GET_MODE_SIZE (mode);
2347 /* The code above should have handled everything. */
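/* As an illustration, with LEN == 7, 32-bit words, and sufficient
   alignment, each pass of the loop above covers the block as one
   SImode piece (bytes 0-3), one HImode piece (bytes 4-5) and one
   QImode piece (byte 6), asking CONSTFUN for each piece and failing
   if any returned constant is not LEGITIMATE_CONSTANT_P for its
   mode.  */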
2355 /* Generate several move instructions to store LEN bytes generated by
2356 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2357 pointer which will be passed as argument in every CONSTFUN call.
2358 ALIGN is maximum alignment we can assume. */
2361 store_by_pieces (to, len, constfun, constfundata, align)
2363 unsigned HOST_WIDE_INT len;
2364 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2368 struct store_by_pieces data;
2370 if (! MOVE_BY_PIECES_P (len, align))
2372 to = protect_from_queue (to, 1);
2373 data.constfun = constfun;
2374 data.constfundata = constfundata;
2377 store_by_pieces_1 (&data, align);
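/* A minimal sketch of a CONSTFUN callback, for illustration only (the
   real callers live elsewhere, e.g. the string builtins, and must also
   deal with byte order, which is ignored here): it treats CONSTFUNDATA
   as a string and returns each piece as a CONST_INT of MODE.  */

#if 0
static rtx
example_read_str_piece (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  const char *str = (const char *) data;
  HOST_WIDE_INT c = 0;
  unsigned int i;

  /* Accumulate GET_MODE_SIZE (mode) bytes starting at OFFSET.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    c = (c << BITS_PER_UNIT) | (unsigned char) str[offset + i];

  return GEN_INT (trunc_int_for_mode (c, mode));
}
#endif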
2380 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2381 rtx with BLKmode). The caller must pass TO through protect_from_queue
2382 before calling. ALIGN is maximum alignment we can assume. */
2385 clear_by_pieces (to, len, align)
2387 unsigned HOST_WIDE_INT len;
2390 struct store_by_pieces data;
2392 data.constfun = clear_by_pieces_1;
2393 data.constfundata = NULL;
2396 store_by_pieces_1 (&data, align);
2399 /* Callback routine for clear_by_pieces.
2400 Return const0_rtx unconditionally. */
2403 clear_by_pieces_1 (data, offset, mode)
2404 PTR data ATTRIBUTE_UNUSED;
2405 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2406 enum machine_mode mode ATTRIBUTE_UNUSED;
2411 /* Subroutine of clear_by_pieces and store_by_pieces.
2412 Generate several move instructions to store LEN bytes of block TO. (A MEM
2413 rtx with BLKmode). The caller must pass TO through protect_from_queue
2414 before calling. ALIGN is maximum alignment we can assume. */
2417 store_by_pieces_1 (data, align)
2418 struct store_by_pieces *data;
2421 rtx to_addr = XEXP (data->to, 0);
2422 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
2423 enum machine_mode mode = VOIDmode, tmode;
2424 enum insn_code icode;
2427 data->to_addr = to_addr;
2429 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2430 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2432 data->explicit_inc_to = 0;
2434 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2436 data->offset = data->len;
2438 /* If storing requires more than two move insns,
2439 copy addresses to registers (to make displacements shorter)
2440 and use post-increment if available. */
2441 if (!data->autinc_to
2442 && move_by_pieces_ninsns (data->len, align) > 2)
2444 /* Determine the main mode we'll be using. */
2445 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2446 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2447 if (GET_MODE_SIZE (tmode) < max_size)
2450 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2452 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2453 data->autinc_to = 1;
2454 data->explicit_inc_to = -1;
2457 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2458 && ! data->autinc_to)
2460 data->to_addr = copy_addr_to_reg (to_addr);
2461 data->autinc_to = 1;
2462 data->explicit_inc_to = 1;
2465 if (!data->autinc_to && CONSTANT_P (to_addr))
2466 data->to_addr = copy_addr_to_reg (to_addr);
2469 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2470 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2471 align = MOVE_MAX * BITS_PER_UNIT;
2473 /* First store what we can in the largest integer mode, then go to
2474 successively smaller modes. */
2476 while (max_size > 1)
2478 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2479 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2480 if (GET_MODE_SIZE (tmode) < max_size)
2483 if (mode == VOIDmode)
2486 icode = mov_optab->handlers[(int) mode].insn_code;
2487 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2488 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2490 max_size = GET_MODE_SIZE (mode);
2493 /* The code above should have handled everything. */
2498 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2499 with move instructions for mode MODE. GENFUN is the gen_... function
2500 to make a move insn for that mode. DATA has all the other info. */
2503 store_by_pieces_2 (genfun, mode, data)
2504 rtx (*genfun) PARAMS ((rtx, ...));
2505 enum machine_mode mode;
2506 struct store_by_pieces *data;
2508 unsigned int size = GET_MODE_SIZE (mode);
2511 while (data->len >= size)
2514 data->offset -= size;
2516 if (data->autinc_to)
2518 to1 = replace_equiv_address (data->to, data->to_addr);
2519 to1 = adjust_address (to1, mode, 0);
2522 to1 = adjust_address (data->to, mode, data->offset);
2524 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2525 emit_insn (gen_add2_insn (data->to_addr,
2526 GEN_INT (-(HOST_WIDE_INT) size)));
2528 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2529 emit_insn ((*genfun) (to1, cst));
2531 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2532 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2534 if (! data->reverse)
2535 data->offset += size;
2541 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2542 its length in bytes and ALIGN is the maximum alignment we can assume.
2544 If we call a function that returns the length of the block, return it. */
2547 clear_storage (object, size, align)
2552 #ifdef TARGET_MEM_FUNCTIONS
2554 tree call_expr, arg_list;
2558 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2559 just move a zero. Otherwise, do this a piece at a time. */
2560 if (GET_MODE (object) != BLKmode
2561 && GET_CODE (size) == CONST_INT
2562 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
2563 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2566 object = protect_from_queue (object, 1);
2567 size = protect_from_queue (size, 0);
2569 if (GET_CODE (size) == CONST_INT
2570 && MOVE_BY_PIECES_P (INTVAL (size), align))
2571 clear_by_pieces (object, INTVAL (size), align);
2574 /* Try the most limited insn first, because there's no point
2575 including more than one in the machine description unless
2576 the more limited one has some advantage. */
2578 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2579 enum machine_mode mode;
2581 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2582 mode = GET_MODE_WIDER_MODE (mode))
2584 enum insn_code code = clrstr_optab[(int) mode];
2585 insn_operand_predicate_fn pred;
2587 if (code != CODE_FOR_nothing
2588 /* We don't need MODE to be narrower than
2589 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2590 the mode mask, as it is returned by the macro, it will
2591 definitely be less than the actual mode mask. */
2592 && ((GET_CODE (size) == CONST_INT
2593 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2594 <= (GET_MODE_MASK (mode) >> 1)))
2595 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2596 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2597 || (*pred) (object, BLKmode))
2598 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2599 || (*pred) (opalign, VOIDmode)))
2602 rtx last = get_last_insn ();
2605 op1 = convert_to_mode (mode, size, 1);
2606 pred = insn_data[(int) code].operand[1].predicate;
2607 if (pred != 0 && ! (*pred) (op1, mode))
2608 op1 = copy_to_mode_reg (mode, op1);
2610 pat = GEN_FCN ((int) code) (object, op1, opalign);
2617 delete_insns_since (last);
2621 /* OBJECT or SIZE may have been passed through protect_from_queue.
2623 It is unsafe to save the value generated by protect_from_queue
2624 and reuse it later. Consider what happens if emit_queue is
2625 called before the return value from protect_from_queue is used.
2627 Expansion of the CALL_EXPR below will call emit_queue before
2628 we are finished emitting RTL for argument setup. So if we are
2629 not careful we could get the wrong value for an argument.
2631 To avoid this problem we go ahead and emit code to copy OBJECT
2632 and SIZE into new pseudos. We can then place those new pseudos
2633 into an RTL_EXPR and use them later, even after a call to
2636 Note this is not strictly needed for library calls since they
2637 do not call emit_queue before loading their arguments. However,
2638 we may need to have library calls call emit_queue in the future
2639 since failing to do so could cause problems for targets which
2640 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2641 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2643 #ifdef TARGET_MEM_FUNCTIONS
2644 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2646 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2647 TREE_UNSIGNED (integer_type_node));
2648 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2651 #ifdef TARGET_MEM_FUNCTIONS
2652 /* It is incorrect to use the libcall calling conventions to call
2653 memset in this context.
2655 This could be a user call to memset and the user may wish to
2656 examine the return value from memset.
2658 For targets where libcalls and normal calls have different
2659 conventions for returning pointers, we could end up generating
2662 So instead of using a libcall sequence we build up a suitable
2663 CALL_EXPR and expand the call in the normal fashion. */
2664 if (fn == NULL_TREE)
2668 /* This was copied from except.c; I don't know whether all of it is
2669 necessary in this context or not. */
2670 fn = get_identifier ("memset");
2671 fntype = build_pointer_type (void_type_node);
2672 fntype = build_function_type (fntype, NULL_TREE);
2673 fn = build_decl (FUNCTION_DECL, fn, fntype);
2674 ggc_add_tree_root (&fn, 1);
2675 DECL_EXTERNAL (fn) = 1;
2676 TREE_PUBLIC (fn) = 1;
2677 DECL_ARTIFICIAL (fn) = 1;
2678 TREE_NOTHROW (fn) = 1;
2679 make_decl_rtl (fn, NULL);
2680 assemble_external (fn);
2683 /* We need to make an argument list for the function call.
2685 memset has three arguments: the first is a void * address, the
2686 second an integer with the initialization value, and the last a
2687 size_t byte count for the copy. */
2689 = build_tree_list (NULL_TREE,
2690 make_tree (build_pointer_type (void_type_node),
2692 TREE_CHAIN (arg_list)
2693 = build_tree_list (NULL_TREE,
2694 make_tree (integer_type_node, const0_rtx));
2695 TREE_CHAIN (TREE_CHAIN (arg_list))
2696 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2697 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2699 /* Now we have to build up the CALL_EXPR itself. */
2700 call_expr = build1 (ADDR_EXPR,
2701 build_pointer_type (TREE_TYPE (fn)), fn);
2702 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2703 call_expr, arg_list, NULL_TREE);
2704 TREE_SIDE_EFFECTS (call_expr) = 1;
2706 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
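  /* The net effect of the expansion above is the same as the C call

	memset (object, 0, size);

     except that it is expanded as an ordinary call, so a user's use of
     memset's return value behaves as expected.  */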
2708 emit_library_call (bzero_libfunc, LCT_NORMAL,
2709 VOIDmode, 2, object, Pmode, size,
2710 TYPE_MODE (integer_type_node));
2718 /* Generate code to copy Y into X.
2719 Both Y and X must have the same mode, except that
2720 Y can be a constant with VOIDmode.
2721 This mode cannot be BLKmode; use emit_block_move for that.
2723 Return the last instruction emitted. */
2726 emit_move_insn (x, y)
2729 enum machine_mode mode = GET_MODE (x);
2730 rtx y_cst = NULL_RTX;
2733 x = protect_from_queue (x, 1);
2734 y = protect_from_queue (y, 0);
2736 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2739 /* Never force constant_p_rtx to memory. */
2740 if (GET_CODE (y) == CONSTANT_P_RTX)
2742 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2745 y = force_const_mem (mode, y);
2748 /* If X or Y are memory references, verify that their addresses are valid
2750 if (GET_CODE (x) == MEM
2751 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2752 && ! push_operand (x, GET_MODE (x)))
2754 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2755 x = validize_mem (x);
2757 if (GET_CODE (y) == MEM
2758 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2760 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2761 y = validize_mem (y);
2763 if (mode == BLKmode)
2766 last_insn = emit_move_insn_1 (x, y);
2768 if (y_cst && GET_CODE (x) == REG)
2769 REG_NOTES (last_insn)
2770 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2775 /* Low level part of emit_move_insn.
2776 Called just like emit_move_insn, but assumes X and Y
2777 are basically valid. */
2780 emit_move_insn_1 (x, y)
2783 enum machine_mode mode = GET_MODE (x);
2784 enum machine_mode submode;
2785 enum mode_class class = GET_MODE_CLASS (mode);
2788 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2791 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2793 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2795 /* Expand complex moves by moving real part and imag part, if possible. */
2796 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2797 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2799 (class == MODE_COMPLEX_INT
2800 ? MODE_INT : MODE_FLOAT),
2802 && (mov_optab->handlers[(int) submode].insn_code
2803 != CODE_FOR_nothing))
2805 /* Don't split destination if it is a stack push. */
2806 int stack = push_operand (x, GET_MODE (x));
2808 #ifdef PUSH_ROUNDING
2809 /* In case we output to the stack, but the size is smaller than the
2810 machine can push exactly, we need to use move instructions. */
2812 && PUSH_ROUNDING (GET_MODE_SIZE (submode)) != GET_MODE_SIZE (submode))
2815 int offset1, offset2;
2817 /* Do not use anti_adjust_stack, since we don't want to update
2818 stack_pointer_delta. */
2819 temp = expand_binop (Pmode,
2820 #ifdef STACK_GROWS_DOWNWARD
2827 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2831 if (temp != stack_pointer_rtx)
2832 emit_move_insn (stack_pointer_rtx, temp);
2833 #ifdef STACK_GROWS_DOWNWARD
2835 offset2 = GET_MODE_SIZE (submode);
2837 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2838 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2839 + GET_MODE_SIZE (submode));
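	  /* For instance, pushing an SCmode value (two 4-byte parts),
	     assuming PUSH_ROUNDING leaves 8 unchanged: on a downward
	     stack SP moves by -8 and the parts land at SP+0 and SP+4;
	     on an upward stack SP moves by +8 and OFFSET1/OFFSET2 are
	     -8 and -4.  */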
2841 emit_move_insn (change_address (x, submode,
2842 gen_rtx_PLUS (Pmode,
2844 GEN_INT (offset1))),
2845 gen_realpart (submode, y));
2846 emit_move_insn (change_address (x, submode,
2847 gen_rtx_PLUS (Pmode,
2849 GEN_INT (offset2))),
2850 gen_imagpart (submode, y));
2854 /* If this is a stack push, push the highpart first, so it
2855 will be in the argument order.
2857 In that case, change_address is used only to convert
2858 the mode, not to change the address. */
2861 /* Note that the real part always precedes the imag part in memory
2862 regardless of machine's endianness. */
2863 #ifdef STACK_GROWS_DOWNWARD
2864 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2865 (gen_rtx_MEM (submode, XEXP (x, 0)),
2866 gen_imagpart (submode, y)));
2867 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2868 (gen_rtx_MEM (submode, XEXP (x, 0)),
2869 gen_realpart (submode, y)));
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2872 (gen_rtx_MEM (submode, XEXP (x, 0)),
2873 gen_realpart (submode, y)));
2874 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2875 (gen_rtx_MEM (submode, XEXP (x, 0)),
2876 gen_imagpart (submode, y)));
2881 rtx realpart_x, realpart_y;
2882 rtx imagpart_x, imagpart_y;
2884 /* If this is a complex value with each part being smaller than a
2885 word, the usual calling sequence will likely pack the pieces into
2886 a single register. Unfortunately, SUBREG of hard registers only
2887 deals in terms of words, so we have a problem converting input
2888 arguments to the CONCAT of two registers that is used elsewhere
2889 for complex values. If this is before reload, we can copy it into
2890 memory and reload. FIXME, we should see about using extract and
2891 insert on integer registers, but complex short and complex char
2892 variables should be rarely used. */
2893 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2894 && (reload_in_progress | reload_completed) == 0)
2896 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2897 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2899 if (packed_dest_p || packed_src_p)
2901 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2902 ? MODE_FLOAT : MODE_INT);
2904 enum machine_mode reg_mode
2905 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2907 if (reg_mode != BLKmode)
2909 rtx mem = assign_stack_temp (reg_mode,
2910 GET_MODE_SIZE (mode), 0);
2911 rtx cmem = adjust_address (mem, mode, 0);
2914 = N_("function using short complex types cannot be inline");
2918 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2919 emit_move_insn_1 (cmem, y);
2920 return emit_move_insn_1 (sreg, mem);
2924 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2925 emit_move_insn_1 (mem, sreg);
2926 return emit_move_insn_1 (x, cmem);
2932 realpart_x = gen_realpart (submode, x);
2933 realpart_y = gen_realpart (submode, y);
2934 imagpart_x = gen_imagpart (submode, x);
2935 imagpart_y = gen_imagpart (submode, y);
2937 /* Show the output dies here. This is necessary for SUBREGs
2938 of pseudos since we cannot track their lifetimes correctly;
2939 hard regs shouldn't appear here except as return values.
2940 We never want to emit such a clobber after reload. */
2942 && ! (reload_in_progress || reload_completed)
2943 && (GET_CODE (realpart_x) == SUBREG
2944 || GET_CODE (imagpart_x) == SUBREG))
2946 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2949 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2950 (realpart_x, realpart_y));
2951 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2952 (imagpart_x, imagpart_y));
2955 return get_last_insn ();
2958 /* This will handle any multi-word mode that lacks a move_insn pattern.
2959 However, you will get better code if you define such patterns,
2960 even if they must turn into multiple assembler instructions. */
2961 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2967 #ifdef PUSH_ROUNDING
2969 /* If X is a push on the stack, do the push now and replace
2970 X with a reference to the stack pointer. */
2971 if (push_operand (x, GET_MODE (x)))
2976 /* Do not use anti_adjust_stack, since we don't want to update
2977 stack_pointer_delta. */
2978 temp = expand_binop (Pmode,
2979 #ifdef STACK_GROWS_DOWNWARD
2986 (PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))),
2990 if (temp != stack_pointer_rtx)
2991 emit_move_insn (stack_pointer_rtx, temp);
2993 code = GET_CODE (XEXP (x, 0));
2994 /* Just hope that small offsets off SP are OK. */
2995 if (code == POST_INC)
2996 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
2997 GEN_INT (-(HOST_WIDE_INT)
2998 GET_MODE_SIZE (GET_MODE (x))));
2999 else if (code == POST_DEC)
3000 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3001 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3003 temp = stack_pointer_rtx;
3005 x = change_address (x, VOIDmode, temp);
3009 /* If we are in reload, see if either operand is a MEM whose address
3010 is scheduled for replacement. */
3011 if (reload_in_progress && GET_CODE (x) == MEM
3012 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3013 x = replace_equiv_address_nv (x, inner);
3014 if (reload_in_progress && GET_CODE (y) == MEM
3015 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3016 y = replace_equiv_address_nv (y, inner);
3022 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3025 rtx xpart = operand_subword (x, i, 1, mode);
3026 rtx ypart = operand_subword (y, i, 1, mode);
3028 /* If we can't get a part of Y, put Y into memory if it is a
3029 constant. Otherwise, force it into a register. If we still
3030 can't get a part of Y, abort. */
3031 if (ypart == 0 && CONSTANT_P (y))
3033 y = force_const_mem (mode, y);
3034 ypart = operand_subword (y, i, 1, mode);
3036 else if (ypart == 0)
3037 ypart = operand_subword_force (y, i, mode);
3039 if (xpart == 0 || ypart == 0)
3042 need_clobber |= (GET_CODE (xpart) == SUBREG);
3044 last_insn = emit_move_insn (xpart, ypart);
3047 seq = gen_sequence ();
3050 /* Show the output dies here. This is necessary for SUBREGs
3051 of pseudos since we cannot track their lifetimes correctly;
3052 hard regs shouldn't appear here except as return values.
3053 We never want to emit such a clobber after reload. */
3055 && ! (reload_in_progress || reload_completed)
3056 && need_clobber != 0)
3058 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
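      /* For example, a DImode move on a 32-bit target with no DImode
	 move pattern becomes two SImode subword moves; the CLOBBER
	 emitted above lets flow treat the whole destination as set by
	 the piecewise stores.  */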
3069 /* Pushing data onto the stack. */
3071 /* Push a block of length SIZE (perhaps variable)
3072 and return an rtx to address the beginning of the block.
3073 Note that it is not possible for the value returned to be a QUEUED.
3074 The value may be virtual_outgoing_args_rtx.
3076 EXTRA is the number of bytes of padding to push in addition to SIZE.
3077 BELOW nonzero means this padding comes at low addresses;
3078 otherwise, the padding comes at high addresses. */
3081 push_block (size, extra, below)
3087 size = convert_modes (Pmode, ptr_mode, size, 1);
3088 if (CONSTANT_P (size))
3089 anti_adjust_stack (plus_constant (size, extra));
3090 else if (GET_CODE (size) == REG && extra == 0)
3091 anti_adjust_stack (size);
3094 temp = copy_to_mode_reg (Pmode, size);
3096 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3097 temp, 0, OPTAB_LIB_WIDEN);
3098 anti_adjust_stack (temp);
3101 #ifndef STACK_GROWS_DOWNWARD
3107 temp = virtual_outgoing_args_rtx;
3108 if (extra != 0 && below)
3109 temp = plus_constant (temp, extra);
3113 if (GET_CODE (size) == CONST_INT)
3114 temp = plus_constant (virtual_outgoing_args_rtx,
3115 -INTVAL (size) - (below ? 0 : extra));
3116 else if (extra != 0 && !below)
3117 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3118 negate_rtx (Pmode, plus_constant (size, extra)));
3120 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3121 negate_rtx (Pmode, size));
3124 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
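/* Concretely, on a downward-growing stack, push_block with a constant
   SIZE of 16 and EXTRA == 0 adjusts the stack by 16 bytes and returns
   virtual_outgoing_args_rtx - 16, the lowest address of the new
   block.  */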
3128 /* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3129 block of SIZE bytes. */
3132 get_push_address (size)
3137 if (STACK_PUSH_CODE == POST_DEC)
3138 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3139 else if (STACK_PUSH_CODE == POST_INC)
3140 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
3142 temp = stack_pointer_rtx;
3144 return copy_to_reg (temp);
3147 #ifdef PUSH_ROUNDING
3149 /* Emit single push insn. */
3152 emit_single_push_insn (mode, x, type)
3154 enum machine_mode mode;
3158 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3160 enum insn_code icode;
3161 insn_operand_predicate_fn pred;
3163 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3164 /* If there is a push pattern, use it. Otherwise try the old way of
3165 throwing a MEM representing the push operation to the move expander. */
3166 icode = push_optab->handlers[(int) mode].insn_code;
3167 if (icode != CODE_FOR_nothing)
3169 if (((pred = insn_data[(int) icode].operand[0].predicate)
3170 && !((*pred) (x, mode))))
3171 x = force_reg (mode, x);
3172 emit_insn (GEN_FCN (icode) (x));
3175 if (GET_MODE_SIZE (mode) == rounded_size)
3176 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3179 #ifdef STACK_GROWS_DOWNWARD
3180 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3181 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3183 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3184 GEN_INT (rounded_size));
3186 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3189 dest = gen_rtx_MEM (mode, dest_addr);
3193 set_mem_attributes (dest, type, 1);
3194 /* Function incoming arguments may overlap with sibling call
3195 outgoing arguments and we cannot allow reordering of reads
3196 from function arguments with stores to outgoing arguments
3197 of sibling calls. */
3198 set_mem_alias_set (dest, 0);
3200 emit_move_insn (dest, x);
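  /* For example, pushing an HImode value on a downward stack whose
     PUSH_ROUNDING widens 2 bytes to 4 emits, absent a push pattern,

	(set (mem:HI (pre_modify (reg sp)
			 (plus (reg sp) (const_int -4))))
	     x)

     so SP moves by the rounded size although only 2 bytes are
     stored.  */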
3204 /* Generate code to push X onto the stack, assuming it has mode MODE and
3206 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3208 SIZE is an rtx for the size of data to be copied (in bytes),
3209 needed only if X is BLKmode.
3211 ALIGN (in bits) is maximum alignment we can assume.
3213 If PARTIAL and REG are both nonzero, then copy that many of the first
3214 words of X into registers starting with REG, and push the rest of X.
3215 The amount of space pushed is decreased by PARTIAL words,
3216 rounded *down* to a multiple of PARM_BOUNDARY.
3217 REG must be a hard register in this case.
3218 If REG is zero but PARTIAL is not, take all other actions for an
3219 argument partially in registers, but do not actually load any registers.
3222 EXTRA is the amount in bytes of extra space to leave next to this arg.
3223 This is ignored if an argument block has already been allocated.
3225 On a machine that lacks real push insns, ARGS_ADDR is the address of
3226 the bottom of the argument block for this call. We use indexing off there
3227 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3228 argument block has not been preallocated.
3230 ARGS_SO_FAR is the size of args previously pushed for this call.
3232 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3233 for arguments passed in registers. If nonzero, it will be the number
3234 of bytes required. */
3237 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3238 args_addr, args_so_far, reg_parm_stack_space,
3241 enum machine_mode mode;
3250 int reg_parm_stack_space;
3254 enum direction stack_direction
3255 #ifdef STACK_GROWS_DOWNWARD
3261 /* Decide where to pad the argument: `downward' for below,
3262 `upward' for above, or `none' for don't pad it.
3263 Default is below for small data on big-endian machines; else above. */
3264 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3266 /* Invert direction if stack is post-decrement.
3268 if (STACK_PUSH_CODE == POST_DEC)
3269 if (where_pad != none)
3270 where_pad = (where_pad == downward ? upward : downward);
3272 xinner = x = protect_from_queue (x, 0);
3274 if (mode == BLKmode)
3276 /* Copy a block into the stack, entirely or partially. */
3279 int used = partial * UNITS_PER_WORD;
3280 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3288 /* USED is now the # of bytes we need not copy to the stack
3289 because registers will take care of them. */
3292 xinner = adjust_address (xinner, BLKmode, used);
3294 /* If the partial register-part of the arg counts in its stack size,
3295 skip the part of stack space corresponding to the registers.
3296 Otherwise, start copying to the beginning of the stack space,
3297 by setting SKIP to 0. */
3298 skip = (reg_parm_stack_space == 0) ? 0 : used;
3300 #ifdef PUSH_ROUNDING
3301 /* Do it with several push insns if that doesn't take lots of insns
3302 and if there is no difficulty with push insns that skip bytes
3303 on the stack for alignment purposes. */
3306 && GET_CODE (size) == CONST_INT
3308 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3309 /* Here we avoid the case of a structure whose weak alignment
3310 forces many pushes of a small amount of data,
3311 and such small pushes do rounding that causes trouble. */
3312 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3313 || align >= BIGGEST_ALIGNMENT
3314 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3315 == (align / BITS_PER_UNIT)))
3316 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3318 /* Push padding now if padding above and stack grows down,
3319 or if padding below and stack grows up.
3320 But if space already allocated, this has already been done. */
3321 if (extra && args_addr == 0
3322 && where_pad != none && where_pad != stack_direction)
3323 anti_adjust_stack (GEN_INT (extra));
3325 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3327 if (current_function_check_memory_usage && ! in_check_memory_usage)
3331 in_check_memory_usage = 1;
3332 temp = get_push_address (INTVAL (size) - used);
3333 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3334 emit_library_call (chkr_copy_bitmap_libfunc,
3335 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3336 Pmode, XEXP (xinner, 0), Pmode,
3337 GEN_INT (INTVAL (size) - used),
3338 TYPE_MODE (sizetype));
3340 emit_library_call (chkr_set_right_libfunc,
3341 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3342 Pmode, GEN_INT (INTVAL (size) - used),
3343 TYPE_MODE (sizetype),
3344 GEN_INT (MEMORY_USE_RW),
3345 TYPE_MODE (integer_type_node));
3346 in_check_memory_usage = 0;
3350 #endif /* PUSH_ROUNDING */
3354 /* Otherwise make space on the stack and copy the data
3355 to the address of that space. */
3357 /* Deduct words put into registers from the size we must copy. */
3360 if (GET_CODE (size) == CONST_INT)
3361 size = GEN_INT (INTVAL (size) - used);
3363 size = expand_binop (GET_MODE (size), sub_optab, size,
3364 GEN_INT (used), NULL_RTX, 0,
3368 /* Get the address of the stack space.
3369 In this case, we do not deal with EXTRA separately.
3370 A single stack adjust will do. */
3373 temp = push_block (size, extra, where_pad == downward);
3376 else if (GET_CODE (args_so_far) == CONST_INT)
3377 temp = memory_address (BLKmode,
3378 plus_constant (args_addr,
3379 skip + INTVAL (args_so_far)));
3381 temp = memory_address (BLKmode,
3382 plus_constant (gen_rtx_PLUS (Pmode,
3386 if (current_function_check_memory_usage && ! in_check_memory_usage)
3388 in_check_memory_usage = 1;
3389 target = copy_to_reg (temp);
3390 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3391 emit_library_call (chkr_copy_bitmap_libfunc,
3392 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3394 XEXP (xinner, 0), Pmode,
3395 size, TYPE_MODE (sizetype));
3397 emit_library_call (chkr_set_right_libfunc,
3398 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
3400 size, TYPE_MODE (sizetype),
3401 GEN_INT (MEMORY_USE_RW),
3402 TYPE_MODE (integer_type_node));
3403 in_check_memory_usage = 0;
3406 target = gen_rtx_MEM (BLKmode, temp);
3410 set_mem_attributes (target, type, 1);
3411 /* Function incoming arguments may overlap with sibling call
3412 outgoing arguments and we cannot allow reordering of reads
3413 from function arguments with stores to outgoing arguments
3414 of sibling calls. */
3415 set_mem_alias_set (target, 0);
3418 /* TEMP is the address of the block. Copy the data there. */
3419 if (GET_CODE (size) == CONST_INT
3420 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
3422 move_by_pieces (target, xinner, INTVAL (size), align);
3427 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3428 enum machine_mode mode;
3430 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3432 mode = GET_MODE_WIDER_MODE (mode))
3434 enum insn_code code = movstr_optab[(int) mode];
3435 insn_operand_predicate_fn pred;
3437 if (code != CODE_FOR_nothing
3438 && ((GET_CODE (size) == CONST_INT
3439 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3440 <= (GET_MODE_MASK (mode) >> 1)))
3441 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3442 && (!(pred = insn_data[(int) code].operand[0].predicate)
3443 || ((*pred) (target, BLKmode)))
3444 && (!(pred = insn_data[(int) code].operand[1].predicate)
3445 || ((*pred) (xinner, BLKmode)))
3446 && (!(pred = insn_data[(int) code].operand[3].predicate)
3447 || ((*pred) (opalign, VOIDmode))))
3449 rtx op2 = convert_to_mode (mode, size, 1);
3450 rtx last = get_last_insn ();
3453 pred = insn_data[(int) code].operand[2].predicate;
3454 if (pred != 0 && ! (*pred) (op2, mode))
3455 op2 = copy_to_mode_reg (mode, op2);
3457 pat = GEN_FCN ((int) code) (target, xinner,
3465 delete_insns_since (last);
3470 if (!ACCUMULATE_OUTGOING_ARGS)
3472 /* If the source is referenced relative to the stack pointer,
3473 copy it to another register to stabilize it. We do not need
3474 to do this if we know that we won't be changing sp. */
3476 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3477 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3478 temp = copy_to_reg (temp);
3481 /* Make inhibit_defer_pop nonzero around the library call
3482 to force it to pop the bcopy-arguments right away. */
3484 #ifdef TARGET_MEM_FUNCTIONS
3485 emit_library_call (memcpy_libfunc, LCT_NORMAL,
3486 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3487 convert_to_mode (TYPE_MODE (sizetype),
3488 size, TREE_UNSIGNED (sizetype)),
3489 TYPE_MODE (sizetype));
3491 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3492 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3493 convert_to_mode (TYPE_MODE (integer_type_node),
3495 TREE_UNSIGNED (integer_type_node)),
3496 TYPE_MODE (integer_type_node));
3501 else if (partial > 0)
3503 /* Scalar partly in registers. */
3505 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3508 /* # words of start of argument
3509 that we must make space for but need not store. */
3510 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3511 int args_offset = INTVAL (args_so_far);
3514 /* Push padding now if padding above and stack grows down,
3515 or if padding below and stack grows up.
3516 But if space already allocated, this has already been done. */
3517 if (extra && args_addr == 0
3518 && where_pad != none && where_pad != stack_direction)
3519 anti_adjust_stack (GEN_INT (extra));
3521 /* If we make space by pushing it, we might as well push
3522 the real data. Otherwise, we can leave OFFSET nonzero
3523 and leave the space uninitialized. */
3527 /* Now NOT_STACK gets the number of words that we don't need to
3528 allocate on the stack. */
3529 not_stack = partial - offset;
3531 /* If the partial register-part of the arg counts in its stack size,
3532 skip the part of stack space corresponding to the registers.
3533 Otherwise, start copying to the beginning of the stack space,
3534 by setting SKIP to 0. */
3535 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3537 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3538 x = validize_mem (force_const_mem (mode, x));
3540 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3541 SUBREGs of such registers are not allowed. */
3542 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3543 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3544 x = copy_to_reg (x);
3546 /* Loop over all the words allocated on the stack for this arg. */
3547 /* We can do it by words, because any scalar bigger than a word
3548 has a size a multiple of a word. */
3549 #ifndef PUSH_ARGS_REVERSED
3550 for (i = not_stack; i < size; i++)
3552 for (i = size - 1; i >= not_stack; i--)
3554 if (i >= not_stack + offset)
3555 emit_push_insn (operand_subword_force (x, i, mode),
3556 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3558 GEN_INT (args_offset + ((i - not_stack + skip)
3560 reg_parm_stack_space, alignment_pad);
3565 rtx target = NULL_RTX;
3568 /* Push padding now if padding above and stack grows down,
3569 or if padding below and stack grows up.
3570 But if space already allocated, this has already been done. */
3571 if (extra && args_addr == 0
3572 && where_pad != none && where_pad != stack_direction)
3573 anti_adjust_stack (GEN_INT (extra));
3575 #ifdef PUSH_ROUNDING
3576 if (args_addr == 0 && PUSH_ARGS)
3577 emit_single_push_insn (mode, x, type);
3581 if (GET_CODE (args_so_far) == CONST_INT)
3583 = memory_address (mode,
3584 plus_constant (args_addr,
3585 INTVAL (args_so_far)));
3587 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3590 dest = gen_rtx_MEM (mode, addr);
3593 set_mem_attributes (dest, type, 1);
3594 /* Function incoming arguments may overlap with sibling call
3595 outgoing arguments and we cannot allow reordering of reads
3596 from function arguments with stores to outgoing arguments
3597 of sibling calls. */
3598 set_mem_alias_set (dest, 0);
3601 emit_move_insn (dest, x);
3605 if (current_function_check_memory_usage && ! in_check_memory_usage)
3607 in_check_memory_usage = 1;
3609 target = get_push_address (GET_MODE_SIZE (mode));
3611 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3612 emit_library_call (chkr_copy_bitmap_libfunc,
3613 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3614 Pmode, XEXP (x, 0), Pmode,
3615 GEN_INT (GET_MODE_SIZE (mode)),
3616 TYPE_MODE (sizetype));
3618 emit_library_call (chkr_set_right_libfunc,
3619 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3620 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
3621 TYPE_MODE (sizetype),
3622 GEN_INT (MEMORY_USE_RW),
3623 TYPE_MODE (integer_type_node));
3624 in_check_memory_usage = 0;
3629 /* If part should go in registers, copy that part
3630 into the appropriate registers. Do this now, at the end,
3631 since mem-to-mem copies above may do function calls. */
3632 if (partial > 0 && reg != 0)
3634 /* Handle calls that pass values in multiple non-contiguous locations.
3635 The Irix 6 ABI has examples of this. */
3636 if (GET_CODE (reg) == PARALLEL)
3637 emit_group_load (reg, x, -1, align); /* ??? size? */
3639 move_block_to_reg (REGNO (reg), x, partial, mode);
3642 if (extra && args_addr == 0 && where_pad == stack_direction)
3643 anti_adjust_stack (GEN_INT (extra));
3645 if (alignment_pad && args_addr == 0)
3646 anti_adjust_stack (alignment_pad);
3649 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3657 /* Only registers can be subtargets. */
3658 || GET_CODE (x) != REG
3659 /* If the register is readonly, it can't be set more than once. */
3660 || RTX_UNCHANGING_P (x)
3661 /* Don't use hard regs to avoid extending their life. */
3662 || REGNO (x) < FIRST_PSEUDO_REGISTER
3663 /* Avoid subtargets inside loops,
3664 since they hide some invariant expressions. */
3665 || preserve_subexpressions_p ())
3669 /* Expand an assignment that stores the value of FROM into TO.
3670 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3671 (This may contain a QUEUED rtx;
3672 if the value is constant, this rtx is a constant.)
3673 Otherwise, the returned value is NULL_RTX.
3675 SUGGEST_REG is no longer actually used.
3676 It used to mean, copy the value through a register
3677 and return that register, if that is possible.
3678 We now use WANT_VALUE to decide whether to do this. */
3681 expand_assignment (to, from, want_value, suggest_reg)
3684 int suggest_reg ATTRIBUTE_UNUSED;
3689 /* Don't crash if the lhs of the assignment was erroneous. */
3691 if (TREE_CODE (to) == ERROR_MARK)
3693 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3694 return want_value ? result : NULL_RTX;
3697 /* Assignment of a structure component needs special treatment
3698 if the structure component's rtx is not simply a MEM.
3699 Assignment of an array element at a constant index, and assignment of
3700 an array element in an unaligned packed structure field, have the same
3703 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3704 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
3706 enum machine_mode mode1;
3707 HOST_WIDE_INT bitsize, bitpos;
3712 unsigned int alignment;
3715 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3716 &unsignedp, &volatilep, &alignment);
3718 /* If we are going to use store_bit_field and extract_bit_field,
3719 make sure to_rtx will be safe for multiple use. */
3721 if (mode1 == VOIDmode && want_value)
3722 tem = stabilize_reference (tem);
3724 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3727 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3729 if (GET_CODE (to_rtx) != MEM)
3732 if (GET_MODE (offset_rtx) != ptr_mode)
3734 #ifdef POINTERS_EXTEND_UNSIGNED
3735 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3737 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3741 /* A constant address in TO_RTX can have VOIDmode, we must not try
3742 to call force_reg for that case. Avoid that case. */
3743 if (GET_CODE (to_rtx) == MEM
3744 && GET_MODE (to_rtx) == BLKmode
3745 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3747 && (bitpos % bitsize) == 0
3748 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3749 && alignment == GET_MODE_ALIGNMENT (mode1))
3752 = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3754 if (GET_CODE (XEXP (temp, 0)) == REG)
3757 to_rtx = (replace_equiv_address
3758 (to_rtx, force_reg (GET_MODE (XEXP (temp, 0)),
3763 to_rtx = change_address (to_rtx, VOIDmode,
3764 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3765 force_reg (ptr_mode,
3771 if (GET_CODE (to_rtx) == MEM)
3773 /* When the offset is zero, to_rtx is the address of the
3774 structure we are storing into, and hence may be shared.
3775 We must make a new MEM before setting the volatile bit. */
3777 to_rtx = copy_rtx (to_rtx);
3779 MEM_VOLATILE_P (to_rtx) = 1;
3781 #if 0 /* This was turned off because, when a field is volatile
3782 in an object which is not volatile, the object may be in a register,
3783 and then we would abort over here. */
3789 if (TREE_CODE (to) == COMPONENT_REF
3790 && TREE_READONLY (TREE_OPERAND (to, 1)))
3793 to_rtx = copy_rtx (to_rtx);
3795 RTX_UNCHANGING_P (to_rtx) = 1;
3798 /* Check the access. */
3799 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3804 enum machine_mode best_mode;
3806 best_mode = get_best_mode (bitsize, bitpos,
3807 TYPE_ALIGN (TREE_TYPE (tem)),
3809 if (best_mode == VOIDmode)
3812 best_mode_size = GET_MODE_BITSIZE (best_mode);
3813 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3814 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3815 size *= GET_MODE_SIZE (best_mode);
3817 /* Check the access right of the pointer. */
3818 in_check_memory_usage = 1;
3820 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3821 VOIDmode, 3, to_addr, Pmode,
3822 GEN_INT (size), TYPE_MODE (sizetype),
3823 GEN_INT (MEMORY_USE_WO),
3824 TYPE_MODE (integer_type_node));
3825 in_check_memory_usage = 0;
3828 /* If this is a varying-length object, we must get the address of
3829 the source and do an explicit block move. */
3832 unsigned int from_align;
3833 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3835 = adjust_address (to_rtx, BLKmode, bitpos / BITS_PER_UNIT);
3837 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
3838 MIN (alignment, from_align));
3845 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3847 /* Spurious cast for HPUX compiler. */
3848 ? ((enum machine_mode)
3849 TYPE_MODE (TREE_TYPE (to)))
3853 int_size_in_bytes (TREE_TYPE (tem)),
3854 get_alias_set (to));
3856 preserve_temp_slots (result);
3860 /* If the value is meaningful, convert RESULT to the proper mode.
3861 Otherwise, return nothing. */
3862 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3863 TYPE_MODE (TREE_TYPE (from)),
3865 TREE_UNSIGNED (TREE_TYPE (to)))
3870 /* If the rhs is a function call and its value is not an aggregate,
3871 call the function before we start to compute the lhs.
3872 This is needed for correct code for cases such as
3873 val = setjmp (buf) on machines where reference to val
3874 requires loading up part of an address in a separate insn.
3876 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3877 since it might be a promoted variable where the zero- or sign-extension
3878 needs to be done. Handling this in the normal way is safe because no
3879 computation is done before the call. */
3880 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3881 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3882 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3883 && GET_CODE (DECL_RTL (to)) == REG))
3888 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3890 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3892 /* Handle calls that return values in multiple non-contiguous locations.
3893 The Irix 6 ABI has examples of this. */
3894 if (GET_CODE (to_rtx) == PARALLEL)
3895 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3896 TYPE_ALIGN (TREE_TYPE (from)));
3897 else if (GET_MODE (to_rtx) == BLKmode)
3898 emit_block_move (to_rtx, value, expr_size (from),
3899 TYPE_ALIGN (TREE_TYPE (from)));
3902 #ifdef POINTERS_EXTEND_UNSIGNED
3903 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3904 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3905 value = convert_memory_address (GET_MODE (to_rtx), value);
3907 emit_move_insn (to_rtx, value);
3909 preserve_temp_slots (to_rtx);
3912 return want_value ? to_rtx : NULL_RTX;
3915 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3916 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3920 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3921 if (GET_CODE (to_rtx) == MEM)
3922 set_mem_alias_set (to_rtx, get_alias_set (to));
3925 /* Don't move directly into a return register. */
3926 if (TREE_CODE (to) == RESULT_DECL
3927 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3932 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3934 if (GET_CODE (to_rtx) == PARALLEL)
3935 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3936 TYPE_ALIGN (TREE_TYPE (from)));
3938 emit_move_insn (to_rtx, temp);
3940 preserve_temp_slots (to_rtx);
3943 return want_value ? to_rtx : NULL_RTX;
3946 /* In case we are returning the contents of an object which overlaps
3947 the place the value is being stored, use a safe function when copying
3948 a value through a pointer into a structure value return block. */
3949 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3950 && current_function_returns_struct
3951 && !current_function_returns_pcc_struct)
3956 size = expr_size (from);
3957 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3958 EXPAND_MEMORY_USE_DONT);
3960 /* Copy the rights of the bitmap. */
3961 if (current_function_check_memory_usage)
3962 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3963 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3964 XEXP (from_rtx, 0), Pmode,
3965 convert_to_mode (TYPE_MODE (sizetype),
3966 size, TREE_UNSIGNED (sizetype)),
3967 TYPE_MODE (sizetype));
3969 #ifdef TARGET_MEM_FUNCTIONS
3970 emit_library_call (memmove_libfunc, LCT_NORMAL,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
3974 size, TREE_UNSIGNED (sizetype)),
3975 TYPE_MODE (sizetype));
3977 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3979 XEXP (to_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (integer_type_node),
3981 size, TREE_UNSIGNED (integer_type_node)),
3982 TYPE_MODE (integer_type_node));
3985 preserve_temp_slots (to_rtx);
3988 return want_value ? to_rtx : NULL_RTX;
3991 /* Compute FROM and store the value in the rtx we got. */
3994 result = store_expr (from, to_rtx, want_value);
3995 preserve_temp_slots (result);
3998 return want_value ? result : NULL_RTX;
4001 /* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
4003 TARGET may contain a QUEUED rtx.
4005 If WANT_VALUE is nonzero, return a copy of the value
4006 not in TARGET, so that we can be sure to use the proper
4007 value in a containing expression even if TARGET has something
4008 else stored in it. If possible, we copy the value through a pseudo
4009 and return that pseudo. Or, if the value is constant, we try to
4010 return the constant. In some cases, we return a pseudo
4011 copied *from* TARGET.
4013 If the mode is BLKmode then we may return TARGET itself.
4014 It turns out that in BLKmode it doesn't cause a problem,
4015 because C has no operators that could combine two different
4016 assignments into the same BLKmode object with different values
4017 with no sequence point. Will other languages need this to be more thorough?
4020 If WANT_VALUE is 0, we return NULL, to make sure
4021 to catch quickly any cases where the caller uses the value
4022 and fails to set WANT_VALUE. */
4025 store_expr (exp, target, want_value)
4031 int dont_return_target = 0;
4032 int dont_store_target = 0;
4034 if (TREE_CODE (exp) == COMPOUND_EXPR)
4036 /* Perform first part of compound expression, then assign from second
4038 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4040 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4042 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4044 /* For conditional expression, get safe form of the target. Then
4045 test the condition, doing the appropriate assignment on either
4046 side. This avoids the creation of unnecessary temporaries.
4047 For non-BLKmode, it is more efficient not to do this. */
4049 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4052 target = protect_from_queue (target, 1);
4054 do_pending_stack_adjust ();
4056 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4057 start_cleanup_deferral ();
4058 store_expr (TREE_OPERAND (exp, 1), target, 0);
4059 end_cleanup_deferral ();
4061 emit_jump_insn (gen_jump (lab2));
4064 start_cleanup_deferral ();
4065 store_expr (TREE_OPERAND (exp, 2), target, 0);
4066 end_cleanup_deferral ();
4071 return want_value ? target : NULL_RTX;
4073 else if (queued_subexp_p (target))
4074 /* If target contains a postincrement, let's not risk
4075 using it as the place to generate the rhs. */
4077 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4079 /* Expand EXP into a new pseudo. */
4080 temp = gen_reg_rtx (GET_MODE (target));
4081 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4084 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4086 /* If target is volatile, ANSI requires accessing the value
4087 *from* the target, if it is accessed. So make that happen.
4088 In no case return the target itself. */
4089 if (! MEM_VOLATILE_P (target) && want_value)
4090 dont_return_target = 1;
4092 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4093 && GET_MODE (target) != BLKmode)
4094 /* If target is in memory and caller wants value in a register instead,
4095 arrange that. Pass TARGET as target for expand_expr so that,
4096 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4097 We know expand_expr will not use the target in that case.
4098 Don't do this if TARGET is volatile because we are supposed
4099 to write it and then read it. */
4101 temp = expand_expr (exp, target, GET_MODE (target), 0);
4102 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4104 /* If TEMP is already in the desired TARGET, only copy it from
4105 memory and don't store it there again. */
4107 || (rtx_equal_p (temp, target)
4108 && ! side_effects_p (temp) && ! side_effects_p (target)))
4109 dont_store_target = 1;
4110 temp = copy_to_reg (temp);
4112 dont_return_target = 1;
4114 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4115 /* If this is a scalar in a register that is stored in a wider mode
4116 than the declared mode, compute the result into its declared mode
4117 and then convert to the wider mode. Our value is the computed
4120 /* If we don't want a value, we can do the conversion inside EXP,
4121 which will often result in some optimizations. Do the conversion
4122 in two steps: first change the signedness, if needed, then
4123 the extend. But don't do this if the type of EXP is a subtype
4124 of something else since then the conversion might involve
4125 more than just converting modes. */
4126 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4127 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4129 if (TREE_UNSIGNED (TREE_TYPE (exp))
4130 != SUBREG_PROMOTED_UNSIGNED_P (target))
4133 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4137 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4138 SUBREG_PROMOTED_UNSIGNED_P (target)),
4142 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
4144 /* If TEMP is a volatile MEM and we want a result value, make
4145 the access now so it gets done only once. Likewise if
4146 it contains TARGET. */
4147 if (GET_CODE (temp) == MEM && want_value
4148 && (MEM_VOLATILE_P (temp)
4149 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4150 temp = copy_to_reg (temp);
4152 /* If TEMP is a VOIDmode constant, use convert_modes to make
4153 sure that we properly convert it. */
4154 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4156 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4157 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4158 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4159 GET_MODE (target), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4163 convert_move (SUBREG_REG (target), temp,
4164 SUBREG_PROMOTED_UNSIGNED_P (target));
4166 /* If we promoted a constant, change the mode back down to match
4167 target. Otherwise, the caller might get confused by a result whose
4168 mode is larger than expected. */
4170 if (want_value && GET_MODE (temp) != GET_MODE (target)
4171 && GET_MODE (temp) != VOIDmode)
4173 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4174 SUBREG_PROMOTED_VAR_P (temp) = 1;
4175 SUBREG_PROMOTED_UNSIGNED_P (temp)
4176 = SUBREG_PROMOTED_UNSIGNED_P (target);
4179 return want_value ? temp : NULL_RTX;
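     /* A typical instance: on a target whose PROMOTE_MODE widens
	subword variables, a `short' object lives in an SImode register
	and is referenced as (subreg:HI (reg:SI N) ...) with
	SUBREG_PROMOTED_VAR_P set; the code above computes the value in
	HImode and then extends it into the full SImode register.  */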
4183 temp = expand_expr (exp, target, GET_MODE (target), 0);
4184 /* Return TARGET if it's a specified hardware register.
4185 If TARGET is a volatile mem ref, either return TARGET
4186 or return a reg copied *from* TARGET; ANSI requires this.
4188 Otherwise, if TEMP is not TARGET, return TEMP
4189 if it is constant (for efficiency),
4190 or if we really want the correct value. */
4191 if (!(target && GET_CODE (target) == REG
4192 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4193 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4194 && ! rtx_equal_p (temp, target)
4195 && (CONSTANT_P (temp) || want_value))
4196 dont_return_target = 1;
4199 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4200 the same as that of TARGET, adjust the constant. This is needed, for
4201 example, in case it is a CONST_DOUBLE and we want only a word-sized
4203 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4204 && TREE_CODE (exp) != ERROR_MARK
4205 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4206 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4207 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4209 if (current_function_check_memory_usage
4210 && GET_CODE (target) == MEM
4211 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4213 in_check_memory_usage = 1;
4214 if (GET_CODE (temp) == MEM)
4215 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4216 VOIDmode, 3, XEXP (target, 0), Pmode,
4217 XEXP (temp, 0), Pmode,
4218 expr_size (exp), TYPE_MODE (sizetype));
4220 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4221 VOIDmode, 3, XEXP (target, 0), Pmode,
4222 expr_size (exp), TYPE_MODE (sizetype),
4223 GEN_INT (MEMORY_USE_WO),
4224 TYPE_MODE (integer_type_node));
4225 in_check_memory_usage = 0;
4228 /* If value was not generated in the target, store it there.
4229 Convert the value to TARGET's type first if necessary. */
4230 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4231 one or both of them are volatile memory refs, we have to distinguish
4233 - expand_expr has used TARGET. In this case, we must not generate
4234 another copy. This can be detected by TARGET being equal according
4236 - expand_expr has not used TARGET - that means that the source just
4237 happens to have the same RTX form. Since temp will have been created
4238 by expand_expr, it will compare unequal according to == .
4239 We must generate a copy in this case, to reach the correct number
4240 of volatile memory references. */
4242 if ((! rtx_equal_p (temp, target)
4243 || (temp != target && (side_effects_p (temp)
4244 || side_effects_p (target))))
4245 && TREE_CODE (exp) != ERROR_MARK
4246 && ! dont_store_target)
4248 target = protect_from_queue (target, 1);
4249 if (GET_MODE (temp) != GET_MODE (target)
4250 && GET_MODE (temp) != VOIDmode)
4252 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4253 if (dont_return_target)
4255 /* In this case, we will return TEMP,
4256 so make sure it has the proper mode.
4257 But don't forget to store the value into TARGET. */
4258 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4259 emit_move_insn (target, temp);
4262 convert_move (target, temp, unsignedp);
4265 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4267 /* Handle copying a string constant into an array.
4268 The string constant may be shorter than the array.
4269 So copy just the string's actual length, and clear the rest. */
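/* Illustrative example (assuming 8-bit bytes): for

     char buf[8] = "abc";

   the STRING_CST supplies TREE_STRING_LENGTH == 4 bytes (including
   the terminating nul), so we block-copy 4 bytes and then clear the
   remaining 4 bytes of the array. */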
4273 /* Get the size of the data type of the string,
4274 which is actually the size of the target. */
4275 size = expr_size (exp);
4276 if (GET_CODE (size) == CONST_INT
4277 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4278 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
4281 /* Compute the size of the data to copy from the string. */
4283 = size_binop (MIN_EXPR,
4284 make_tree (sizetype, size),
4285 size_int (TREE_STRING_LENGTH (exp)));
4286 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
4287 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4291 /* Copy that much. */
4292 emit_block_move (target, temp, copy_size_rtx,
4293 TYPE_ALIGN (TREE_TYPE (exp)));
4295 /* Figure out how much is left in TARGET that we have to clear.
4296 Do all calculations in ptr_mode. */
4298 addr = XEXP (target, 0);
4299 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4301 if (GET_CODE (copy_size_rtx) == CONST_INT)
4303 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
4304 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
4305 align = MIN (align,
4306 (unsigned int) (BITS_PER_UNIT
4307 * (INTVAL (copy_size_rtx)
4308 & - INTVAL (copy_size_rtx))));
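/* In two's complement, X & -X isolates the lowest set bit of X, i.e.
   the largest power of two dividing X. E.g. a copy size of 12 bytes
   gives 12 & -12 == 4, so the address just past the copied data can
   be assumed to be at most 4-byte aligned. */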
4312 addr = force_reg (ptr_mode, addr);
4313 addr = expand_binop (ptr_mode, add_optab, addr,
4314 copy_size_rtx, NULL_RTX, 0,
4317 size = expand_binop (ptr_mode, sub_optab, size,
4318 copy_size_rtx, NULL_RTX, 0,
4321 align = BITS_PER_UNIT;
4322 label = gen_label_rtx ();
4323 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4324 GET_MODE (size), 0, 0, label);
4326 align = MIN (align, expr_align (copy_size));
4328 if (size != const0_rtx)
4330 rtx dest = gen_rtx_MEM (BLKmode, addr);
4332 MEM_COPY_ATTRIBUTES (dest, target);
4334 /* Be sure we can write on ADDR. */
4335 in_check_memory_usage = 1;
4336 if (current_function_check_memory_usage)
4337 emit_library_call (chkr_check_addr_libfunc,
4338 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
4340 size, TYPE_MODE (sizetype),
4341 GEN_INT (MEMORY_USE_WO),
4342 TYPE_MODE (integer_type_node));
4343 in_check_memory_usage = 0;
4344 clear_storage (dest, size, align);
4351 /* Handle calls that return values in multiple non-contiguous locations.
4352 The Irix 6 ABI has examples of this. */
4353 else if (GET_CODE (target) == PARALLEL)
4354 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
4355 TYPE_ALIGN (TREE_TYPE (exp)));
4356 else if (GET_MODE (temp) == BLKmode)
4357 emit_block_move (target, temp, expr_size (exp),
4358 TYPE_ALIGN (TREE_TYPE (exp)));
4360 emit_move_insn (target, temp);
4363 /* If we don't want a value, return NULL_RTX. */
4367 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4368 ??? The latter test doesn't seem to make sense. */
4369 else if (dont_return_target && GET_CODE (temp) != MEM)
4372 /* Return TARGET itself if it is a hard register. */
4373 else if (want_value && GET_MODE (target) != BLKmode
4374 && ! (GET_CODE (target) == REG
4375 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4376 return copy_to_reg (target);
4382 /* Return 1 if EXP just contains zeros. */
4390 switch (TREE_CODE (exp))
4394 case NON_LVALUE_EXPR:
4395 return is_zeros_p (TREE_OPERAND (exp, 0));
4398 return integer_zerop (exp);
4402 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4405 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4408 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4409 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4410 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4411 if (! is_zeros_p (TREE_VALUE (elt)))
4421 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4424 mostly_zeros_p (exp)
4427 if (TREE_CODE (exp) == CONSTRUCTOR)
4429 int elts = 0, zeros = 0;
4430 tree elt = CONSTRUCTOR_ELTS (exp);
4431 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4433 /* If there are no ranges of true bits, it is all zero. */
4434 return elt == NULL_TREE;
4436 for (; elt; elt = TREE_CHAIN (elt))
4438 /* We do not handle the case where the index is a RANGE_EXPR,
4439 so the statistic will be somewhat inaccurate.
4440 We do make a more accurate count in store_constructor itself,
4441 and since this function is only used for nested array elements,
4442 this should be close enough. */
4443 if (mostly_zeros_p (TREE_VALUE (elt)))
4448 return 4 * zeros >= 3 * elts;
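/* The test uses integer arithmetic only: 4 * zeros >= 3 * elts is
   equivalent to zeros / elts >= 3/4. E.g. with 8 elements of which
   6 are zero, 24 >= 24 holds, so the constructor counts as mostly
   zero. */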
4451 return is_zeros_p (exp);
4454 /* Helper function for store_constructor.
4455 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4456 TYPE is the type of the CONSTRUCTOR, not the element type.
4457 ALIGN and CLEARED are as for store_constructor.
4458 ALIAS_SET is the alias set to use for any stores.
4460 This provides a recursive shortcut back to store_constructor when it isn't
4461 necessary to go through store_field. This is so that we can pass through
4462 the cleared field to let store_constructor know that we may not have to
4463 clear a substructure if the outer structure has already been cleared. */
4466 store_constructor_field (target, bitsize, bitpos,
4467 mode, exp, type, align, cleared, alias_set)
4469 unsigned HOST_WIDE_INT bitsize;
4470 HOST_WIDE_INT bitpos;
4471 enum machine_mode mode;
4477 if (TREE_CODE (exp) == CONSTRUCTOR
4478 && bitpos % BITS_PER_UNIT == 0
4479 /* If we have a non-zero bitpos for a register target, then we just
4480 let store_field do the bitfield handling. This is unlikely to
4481 generate unnecessary clear instructions anyway. */
4482 && (bitpos == 0 || GET_CODE (target) == MEM))
4486 = adjust_address (target,
4487 GET_MODE (target) == BLKmode
4489 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4490 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4493 /* Show the alignment may no longer be what it was and update the alias
4494 set, if required. */
4496 align = MIN (align, (unsigned int) bitpos & - bitpos);
4497 if (GET_CODE (target) == MEM)
4498 set_mem_alias_set (target, alias_set);
4500 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4503 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
4504 int_size_in_bytes (type), alias_set);
4507 /* Store the value of constructor EXP into the rtx TARGET.
4508 TARGET is either a REG or a MEM.
4509 ALIGN is the maximum known alignment for TARGET.
4510 CLEARED is true if TARGET is known to have been zeroed.
4511 SIZE is the number of bytes of TARGET we are allowed to modify: this
4512 may not be the same as the size of EXP if we are assigning to a field
4513 which has been packed to exclude padding bits. */
4516 store_constructor (exp, target, align, cleared, size)
4523 tree type = TREE_TYPE (exp);
4524 #ifdef WORD_REGISTER_OPERATIONS
4525 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4528 /* We know our target cannot conflict, since safe_from_p has been called. */
4530 /* Don't try copying piece by piece into a hard register
4531 since that is vulnerable to being clobbered by EXP.
4532 Instead, construct in a pseudo register and then copy it all. */
4533 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4535 rtx temp = gen_reg_rtx (GET_MODE (target));
4536 store_constructor (exp, temp, align, cleared, size);
4537 emit_move_insn (target, temp);
4542 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4543 || TREE_CODE (type) == QUAL_UNION_TYPE)
4547 /* Inform later passes that the whole union value is dead. */
4548 if ((TREE_CODE (type) == UNION_TYPE
4549 || TREE_CODE (type) == QUAL_UNION_TYPE)
4552 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4554 /* If the constructor is empty, clear the union. */
4555 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4556 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
4559 /* If we are building a static constructor into a register,
4560 set the initial value as zero so we can fold the value into
4561 a constant. But if more than one register is involved,
4562 this probably loses. */
4563 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4564 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4567 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4572 /* If the constructor has fewer fields than the structure
4573 or if we are initializing the structure to mostly zeros,
4574 clear the whole structure first. Don't do this if TARGET is a
4575 register whose mode size isn't equal to SIZE since clear_storage
4576 can't handle this case. */
4578 && ((list_length (CONSTRUCTOR_ELTS (exp))
4579 != fields_length (type))
4580 || mostly_zeros_p (exp))
4581 && (GET_CODE (target) != REG
4582 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
4585 clear_storage (target, GEN_INT (size), align);
4590 /* Inform later passes that the old value is dead. */
4591 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4593 /* Store each element of the constructor into
4594 the corresponding field of TARGET. */
4596 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4598 tree field = TREE_PURPOSE (elt);
4599 #ifdef WORD_REGISTER_OPERATIONS
4600 tree value = TREE_VALUE (elt);
4602 enum machine_mode mode;
4603 HOST_WIDE_INT bitsize;
4604 HOST_WIDE_INT bitpos = 0;
4607 rtx to_rtx = target;
4609 /* Just ignore missing fields.
4610 We cleared the whole structure, above,
4611 if any fields are missing. */
4615 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4618 if (host_integerp (DECL_SIZE (field), 1))
4619 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4623 unsignedp = TREE_UNSIGNED (field);
4624 mode = DECL_MODE (field);
4625 if (DECL_BIT_FIELD (field))
4628 offset = DECL_FIELD_OFFSET (field);
4629 if (host_integerp (offset, 0)
4630 && host_integerp (bit_position (field), 0))
4632 bitpos = int_bit_position (field);
4636 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4642 if (contains_placeholder_p (offset))
4643 offset = build (WITH_RECORD_EXPR, sizetype,
4644 offset, make_tree (TREE_TYPE (exp), target));
4646 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4647 if (GET_CODE (to_rtx) != MEM)
4650 if (GET_MODE (offset_rtx) != ptr_mode)
4652 #ifdef POINTERS_EXTEND_UNSIGNED
4653 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4655 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4660 = change_address (to_rtx, VOIDmode,
4661 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4662 force_reg (ptr_mode,
4664 align = DECL_OFFSET_ALIGN (field);
4667 if (TREE_READONLY (field))
4669 if (GET_CODE (to_rtx) == MEM)
4670 to_rtx = copy_rtx (to_rtx);
4672 RTX_UNCHANGING_P (to_rtx) = 1;
4675 #ifdef WORD_REGISTER_OPERATIONS
4676 /* If this initializes a field that is smaller than a word, at the
4677 start of a word, try to widen it to a full word.
4678 This special case allows us to output C++ member function
4679 initializations in a form that the optimizers can understand. */
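/* Illustrative sketch (assuming a 32-bit word): setting a 16-bit
   field at bit 0 of a word-sized register to the constant 5 becomes
   a store of the full-word constant 5, or 5 << 16 on a big-endian
   target, so the optimizers see an ordinary word store instead of a
   bit-field insertion. */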
4680 if (GET_CODE (target) == REG
4681 && bitsize < BITS_PER_WORD
4682 && bitpos % BITS_PER_WORD == 0
4683 && GET_MODE_CLASS (mode) == MODE_INT
4684 && TREE_CODE (value) == INTEGER_CST
4686 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4688 tree type = TREE_TYPE (value);
4689 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4691 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4692 value = convert (type, value);
4694 if (BYTES_BIG_ENDIAN)
4696 = fold (build (LSHIFT_EXPR, type, value,
4697 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4698 bitsize = BITS_PER_WORD;
4702 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4703 TREE_VALUE (elt), type, align, cleared,
4704 (DECL_NONADDRESSABLE_P (field)
4705 && GET_CODE (to_rtx) == MEM)
4706 ? MEM_ALIAS_SET (to_rtx)
4707 : get_alias_set (TREE_TYPE (field)));
4710 else if (TREE_CODE (type) == ARRAY_TYPE)
4715 tree domain = TYPE_DOMAIN (type);
4716 tree elttype = TREE_TYPE (type);
4717 int const_bounds_p = (TYPE_MIN_VALUE (domain)
4718 && TYPE_MAX_VALUE (domain)
4719 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4720 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4721 HOST_WIDE_INT minelt = 0;
4722 HOST_WIDE_INT maxelt = 0;
4724 /* If we have constant bounds for the range of the type, get them. */
4727 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4728 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4731 /* If the constructor has fewer elements than the array,
4732 clear the whole array first. Similarly if this is a
4733 static constructor of a non-BLKmode object. */
4734 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4738 HOST_WIDE_INT count = 0, zero_count = 0;
4739 need_to_clear = ! const_bounds_p;
4741 /* This loop is a more accurate version of the loop in
4742 mostly_zeros_p (it handles RANGE_EXPR in an index).
4743 It is also needed to check for missing elements. */
4744 for (elt = CONSTRUCTOR_ELTS (exp);
4745 elt != NULL_TREE && ! need_to_clear;
4746 elt = TREE_CHAIN (elt))
4748 tree index = TREE_PURPOSE (elt);
4749 HOST_WIDE_INT this_node_count;
4751 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4753 tree lo_index = TREE_OPERAND (index, 0);
4754 tree hi_index = TREE_OPERAND (index, 1);
4756 if (! host_integerp (lo_index, 1)
4757 || ! host_integerp (hi_index, 1))
4763 this_node_count = (tree_low_cst (hi_index, 1)
4764 - tree_low_cst (lo_index, 1) + 1);
4767 this_node_count = 1;
4769 count += this_node_count;
4770 if (mostly_zeros_p (TREE_VALUE (elt)))
4771 zero_count += this_node_count;
4774 /* Clear the entire array first if there are any missing elements,
4775 or if the incidence of zero elements is >= 75%. */
4777 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4781 if (need_to_clear && size > 0)
4784 clear_storage (target, GEN_INT (size), align);
4787 else if (REG_P (target))
4788 /* Inform later passes that the old value is dead. */
4789 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4791 /* Store each element of the constructor into
4792 the corresponding element of TARGET, determined
4793 by counting the elements. */
4794 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4796 elt = TREE_CHAIN (elt), i++)
4798 enum machine_mode mode;
4799 HOST_WIDE_INT bitsize;
4800 HOST_WIDE_INT bitpos;
4802 tree value = TREE_VALUE (elt);
4803 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4804 tree index = TREE_PURPOSE (elt);
4805 rtx xtarget = target;
4807 if (cleared && is_zeros_p (value))
4810 unsignedp = TREE_UNSIGNED (elttype);
4811 mode = TYPE_MODE (elttype);
4812 if (mode == BLKmode)
4813 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4814 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4817 bitsize = GET_MODE_BITSIZE (mode);
4819 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4821 tree lo_index = TREE_OPERAND (index, 0);
4822 tree hi_index = TREE_OPERAND (index, 1);
4823 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4824 struct nesting *loop;
4825 HOST_WIDE_INT lo, hi, count;
4828 /* If the range is constant and "small", unroll the loop. */
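/* For example, the GNU C initializer

     int a[4] = { [0 ... 3] = 7 };

   produces a single RANGE_EXPR index; when the bounds are constant
   and the total size is small enough, we emit four separate stores
   here rather than generating the runtime loop below. */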
4830 && host_integerp (lo_index, 0)
4831 && host_integerp (hi_index, 0)
4832 && (lo = tree_low_cst (lo_index, 0),
4833 hi = tree_low_cst (hi_index, 0),
4834 count = hi - lo + 1,
4835 (GET_CODE (target) != MEM
4837 || (host_integerp (TYPE_SIZE (elttype), 1)
4838 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4841 lo -= minelt; hi -= minelt;
4842 for (; lo <= hi; lo++)
4844 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4845 store_constructor_field
4846 (target, bitsize, bitpos, mode, value, type, align,
4848 TYPE_NONALIASED_COMPONENT (type)
4849 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
4854 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4855 loop_top = gen_label_rtx ();
4856 loop_end = gen_label_rtx ();
4858 unsignedp = TREE_UNSIGNED (domain);
4860 index = build_decl (VAR_DECL, NULL_TREE, domain);
4863 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4865 SET_DECL_RTL (index, index_r);
4866 if (TREE_CODE (value) == SAVE_EXPR
4867 && SAVE_EXPR_RTL (value) == 0)
4869 /* Make sure value gets expanded once before the
4870 loop. */
4871 expand_expr (value, const0_rtx, VOIDmode, 0);
4874 store_expr (lo_index, index_r, 0);
4875 loop = expand_start_loop (0);
4877 /* Assign value to element index. */
4879 = convert (ssizetype,
4880 fold (build (MINUS_EXPR, TREE_TYPE (index),
4881 index, TYPE_MIN_VALUE (domain))));
4882 position = size_binop (MULT_EXPR, position,
4884 TYPE_SIZE_UNIT (elttype)));
4886 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4887 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4888 xtarget = change_address (target, mode, addr);
4889 if (TREE_CODE (value) == CONSTRUCTOR)
4890 store_constructor (value, xtarget, align, cleared,
4891 bitsize / BITS_PER_UNIT);
4893 store_expr (value, xtarget, 0);
4895 expand_exit_loop_if_false (loop,
4896 build (LT_EXPR, integer_type_node,
4899 expand_increment (build (PREINCREMENT_EXPR,
4901 index, integer_one_node), 0, 0);
4903 emit_label (loop_end);
4906 else if ((index != 0 && ! host_integerp (index, 0))
4907 || ! host_integerp (TYPE_SIZE (elttype), 1))
4913 index = ssize_int (1);
4916 index = convert (ssizetype,
4917 fold (build (MINUS_EXPR, index,
4918 TYPE_MIN_VALUE (domain))));
4920 position = size_binop (MULT_EXPR, index,
4922 TYPE_SIZE_UNIT (elttype)));
4923 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4924 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4925 xtarget = change_address (target, mode, addr);
4926 store_expr (value, xtarget, 0);
4931 bitpos = ((tree_low_cst (index, 0) - minelt)
4932 * tree_low_cst (TYPE_SIZE (elttype), 1));
4934 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4936 store_constructor_field (target, bitsize, bitpos, mode, value,
4937 type, align, cleared,
4938 TYPE_NONALIASED_COMPONENT (type)
4939 && GET_CODE (target) == MEM
4940 ? MEM_ALIAS_SET (target) :
4941 get_alias_set (elttype));
4947 /* Set constructor assignments. */
4948 else if (TREE_CODE (type) == SET_TYPE)
4950 tree elt = CONSTRUCTOR_ELTS (exp);
4951 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4952 tree domain = TYPE_DOMAIN (type);
4953 tree domain_min, domain_max, bitlength;
4955 /* The default implementation strategy is to extract the constant
4956 parts of the constructor, use that to initialize the target,
4957 and then "or" in whatever non-constant ranges we need in addition.
4959 If a large set is all zero or all ones, it is
4960 probably better to set it using memset (if available) or bzero.
4961 Also, if a large set has just a single range, it may be
4962 better to first clear the whole set (using
4963 bzero/memset), and then set the bits we want. */
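/* Sketch: for a set such as [1, 3..5] over the domain 0..31, the
   constant part is assembled into the word 0x3A (bits 1, 3, 4 and 5)
   and stored directly; a non-constant range [lo..hi] would then be
   OR'ed in afterwards via __setbits. SET_TYPE constructors come from
   front ends with set types (e.g. Chill). */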
4965 /* Check for all zeros. */
4966 if (elt == NULL_TREE && size > 0)
4969 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
4973 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4974 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4975 bitlength = size_binop (PLUS_EXPR,
4976 size_diffop (domain_max, domain_min),
4979 nbits = tree_low_cst (bitlength, 1);
4981 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4982 are "complicated" (more than one range), initialize (the
4983 constant parts) by copying from a constant. */
4984 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4985 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
4987 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4988 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
4989 char *bit_buffer = (char *) alloca (nbits);
4990 HOST_WIDE_INT word = 0;
4991 unsigned int bit_pos = 0;
4992 unsigned int ibit = 0;
4993 unsigned int offset = 0; /* In bytes from beginning of set. */
4995 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
4998 if (bit_buffer[ibit])
5000 if (BYTES_BIG_ENDIAN)
5001 word |= (1 << (set_word_size - 1 - bit_pos));
5003 word |= 1 << bit_pos;
5007 if (bit_pos >= set_word_size || ibit == nbits)
5009 if (word != 0 || ! cleared)
5011 rtx datum = GEN_INT (word);
5014 /* The assumption here is that it is safe to use
5015 XEXP if the set is multi-word, but not if
5016 it's single-word. */
5017 if (GET_CODE (target) == MEM)
5018 to_rtx = adjust_address (target, mode, offset);
5019 else if (offset == 0)
5023 emit_move_insn (to_rtx, datum);
5030 offset += set_word_size / BITS_PER_UNIT;
5035 /* Don't bother clearing storage if the set is all ones. */
5036 if (TREE_CHAIN (elt) != NULL_TREE
5037 || (TREE_PURPOSE (elt) == NULL_TREE
5039 : ( ! host_integerp (TREE_VALUE (elt), 0)
5040 || ! host_integerp (TREE_PURPOSE (elt), 0)
5041 || (tree_low_cst (TREE_VALUE (elt), 0)
5042 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5043 != (HOST_WIDE_INT) nbits))))
5044 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
5046 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5048 /* Start of range of element or NULL. */
5049 tree startbit = TREE_PURPOSE (elt);
5050 /* End of range of element, or element value. */
5051 tree endbit = TREE_VALUE (elt);
5052 #ifdef TARGET_MEM_FUNCTIONS
5053 HOST_WIDE_INT startb, endb;
5055 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5057 bitlength_rtx = expand_expr (bitlength,
5058 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5060 /* Handle non-range tuple element like [ expr ]. */
5061 if (startbit == NULL_TREE)
5063 startbit = save_expr (endbit);
5067 startbit = convert (sizetype, startbit);
5068 endbit = convert (sizetype, endbit);
5069 if (! integer_zerop (domain_min))
5071 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5072 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5074 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5075 EXPAND_CONST_ADDRESS);
5076 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5077 EXPAND_CONST_ADDRESS);
5083 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
5086 emit_move_insn (targetx, target);
5089 else if (GET_CODE (target) == MEM)
5094 #ifdef TARGET_MEM_FUNCTIONS
5095 /* Optimization: If startbit and endbit are
5096 constants divisible by BITS_PER_UNIT,
5097 call memset instead. */
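/* E.g. with 8-bit bytes, the range [8 .. 31] gives startb == 8 and
   endb == 32, both multiples of BITS_PER_UNIT, so bytes 1 through 3
   of the set can be filled with a single memset of -1 instead of a
   call to __setbits. */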
5098 if (TREE_CODE (startbit) == INTEGER_CST
5099 && TREE_CODE (endbit) == INTEGER_CST
5100 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5101 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5103 emit_library_call (memset_libfunc, LCT_NORMAL,
5105 plus_constant (XEXP (targetx, 0),
5106 startb / BITS_PER_UNIT),
5108 constm1_rtx, TYPE_MODE (integer_type_node),
5109 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5110 TYPE_MODE (sizetype));
5114 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5115 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5116 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5117 startbit_rtx, TYPE_MODE (sizetype),
5118 endbit_rtx, TYPE_MODE (sizetype));
5121 emit_move_insn (target, targetx);
5129 /* Store the value of EXP (an expression tree)
5130 into a subfield of TARGET which has mode MODE and occupies
5131 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5132 If MODE is VOIDmode, it means that we are storing into a bit-field.
5134 If VALUE_MODE is VOIDmode, return nothing in particular.
5135 UNSIGNEDP is not used in this case.
5137 Otherwise, return an rtx for the value stored. This rtx
5138 has mode VALUE_MODE if that is convenient to do.
5139 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5141 ALIGN is the alignment that TARGET is known to have.
5142 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
5144 ALIAS_SET is the alias set for the destination. This value will
5145 (in general) be different from that for TARGET, since TARGET is a
5146 reference to the containing structure. */
5149 store_field (target, bitsize, bitpos, mode, exp, value_mode,
5150 unsignedp, align, total_size, alias_set)
5152 HOST_WIDE_INT bitsize;
5153 HOST_WIDE_INT bitpos;
5154 enum machine_mode mode;
5156 enum machine_mode value_mode;
5159 HOST_WIDE_INT total_size;
5162 HOST_WIDE_INT width_mask = 0;
5164 if (TREE_CODE (exp) == ERROR_MARK)
5167 /* If we have nothing to store, do nothing unless the expression has
5168 side-effects. */
5169 if (bitsize == 0)
5170 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5172 if (bitsize < HOST_BITS_PER_WIDE_INT)
5173 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5175 /* If we are storing into an unaligned field of an aligned union that is
5176 in a register, we may have the mode of TARGET being an integer mode but
5177 MODE == BLKmode. In that case, get an aligned object whose size and
5178 alignment are the same as TARGET and store TARGET into it (we can avoid
5179 the store if the field being stored is the entire width of TARGET). Then
5180 call ourselves recursively to store the field into a BLKmode version of
5181 that object. Finally, load from the object into TARGET. This is not
5182 very efficient in general, but should only be slightly more expensive
5183 than the otherwise-required unaligned accesses. Perhaps this can be
5184 cleaned up later. */
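/* Sketch of that sequence: OBJECT is a stack temporary with the same
   integer mode as TARGET, and BLK_OBJECT is the same slot viewed in
   BLKmode:

     object = target;               (skipped when the whole width
                                     of TARGET is being overwritten)
     store_field (blk_object, ...); recursive, memory-style store
     target = object;

   so the unaligned store is performed in memory, never directly in
   the register. */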
5187 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5191 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5194 rtx blk_object = copy_rtx (object);
5196 PUT_MODE (blk_object, BLKmode);
5198 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5199 emit_move_insn (object, target);
5201 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
5202 align, total_size, alias_set);
5204 /* Even though we aren't returning target, we need to
5205 give it the updated value. */
5206 emit_move_insn (target, object);
5211 if (GET_CODE (target) == CONCAT)
5213 /* We're storing into a struct containing a single __complex. */
5217 return store_expr (exp, target, 0);
5220 /* If the structure is in a register or if the component
5221 is a bit field, we cannot use addressing to access it.
5222 Use bit-field techniques or SUBREG to store in it. */
5224 if (mode == VOIDmode
5225 || (mode != BLKmode && ! direct_store[(int) mode]
5226 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5227 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5228 || GET_CODE (target) == REG
5229 || GET_CODE (target) == SUBREG
5230 /* If the field isn't aligned enough to store as an ordinary memref,
5231 store it as a bit field. */
5232 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5233 && (align < GET_MODE_ALIGNMENT (mode)
5234 || bitpos % GET_MODE_ALIGNMENT (mode)))
5235 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
5236 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
5237 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5238 /* If the RHS and field are a constant size and the size of the
5239 RHS isn't the same size as the bitfield, we must use bitfield
5240 operations. */
5242 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5243 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5245 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5247 /* If BITSIZE is narrower than the size of the type of EXP
5248 we will be narrowing TEMP. Normally, what's wanted are the
5249 low-order bits. However, if EXP's type is a record and this is a
5250 big-endian machine, we want the upper BITSIZE bits. */
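/* E.g. when narrowing a 32-bit TEMP to a 16-bit field: on a
   little-endian target the low-order half already holds the field,
   while for a record type on a big-endian target we first shift TEMP
   right by 32 - 16 == 16 bits so that the wanted bits land in the
   low-order half. */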
5251 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5252 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5253 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5254 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5255 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5259 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5260 MODE. */
5261 if (mode != VOIDmode && mode != BLKmode
5262 && mode != TYPE_MODE (TREE_TYPE (exp)))
5263 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5265 /* If the modes of TARGET and TEMP are both BLKmode, both
5266 must be in memory and BITPOS must be aligned on a byte
5267 boundary. If so, we simply do a block copy. */
5268 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5270 unsigned int exp_align = expr_align (exp);
5272 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5273 || bitpos % BITS_PER_UNIT != 0)
5276 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5278 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5279 align = MIN (exp_align, align);
5281 /* Find an alignment that is consistent with the bit position. */
5282 while ((bitpos % align) != 0)
5285 emit_block_move (target, temp,
5286 bitsize == -1 ? expr_size (exp)
5287 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5291 return value_mode == VOIDmode ? const0_rtx : target;
5294 /* Store the value in the bitfield. */
5295 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5296 if (value_mode != VOIDmode)
5298 /* The caller wants an rtx for the value. */
5299 /* If possible, avoid refetching from the bitfield itself. */
5301 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5304 enum machine_mode tmode;
5307 return expand_and (temp,
5311 GET_MODE (temp) == VOIDmode
5313 : GET_MODE (temp))), NULL_RTX);
5314 tmode = GET_MODE (temp);
5315 if (tmode == VOIDmode)
5317 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5318 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5319 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5321 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5322 NULL_RTX, value_mode, 0, align,
5329 rtx addr = XEXP (target, 0);
5332 /* If a value is wanted, it must be the lhs;
5333 so make the address stable for multiple use. */
5335 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5336 && ! CONSTANT_ADDRESS_P (addr)
5337 /* A frame-pointer reference is already stable. */
5338 && ! (GET_CODE (addr) == PLUS
5339 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5340 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5341 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5342 target = replace_equiv_address (target, copy_to_reg (addr));
5344 /* Now build a reference to just the desired component. */
5346 to_rtx = copy_rtx (adjust_address (target, mode,
5347 bitpos / BITS_PER_UNIT));
5349 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5350 /* If the address of the structure varies, then it might be on
5351 the stack. And, stack slots may be shared across scopes.
5352 So, two different structures, of different types, can end up
5353 at the same location. We will give the structures alias set
5354 zero; here we must be careful not to give non-zero alias sets
5355 to their component parts. */
5356 set_mem_alias_set (to_rtx,
5357 rtx_varies_p (addr, /*for_alias=*/0)
5360 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5364 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5365 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5366 codes and find the ultimate containing object, which we return.
5368 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5369 bit position, and *PUNSIGNEDP to the signedness of the field.
5370 If the position of the field is variable, we store a tree
5371 giving the variable offset (in units) in *POFFSET.
5372 This offset is in addition to the bit position.
5373 If the position is not variable, we store 0 in *POFFSET.
5374 We set *PALIGNMENT to the alignment of the address that will be
5375 computed. This is the alignment of the thing we return if *POFFSET
5376 is zero, but can be less strictly aligned if *POFFSET is nonzero.
5378 If any of the extraction expressions is volatile,
5379 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5381 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5382 is a mode that can be used to access the field. In that case, *PBITSIZE
5383 is redundant.
5385 If the field describes a variable-sized object, *PMODE is set to
5386 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5387 this case, but the address of the object can be found. */
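/* Illustrative example (the exact numbers depend on the target's
   type layout): for a reference such as

     struct { int pad; char c[10]; } s;
     ... s.c[i] ...

   we would return the VAR_DECL for `s', with *PBITSIZE the size of a
   char, *PBITPOS the constant part of the position (the bit size of
   `pad'), and *POFFSET the tree `(sizetype) i * 1' for the variable
   part. */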
5390 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5391 punsignedp, pvolatilep, palignment)
5393 HOST_WIDE_INT *pbitsize;
5394 HOST_WIDE_INT *pbitpos;
5396 enum machine_mode *pmode;
5399 unsigned int *palignment;
5402 enum machine_mode mode = VOIDmode;
5403 tree offset = size_zero_node;
5404 tree bit_offset = bitsize_zero_node;
5405 unsigned int alignment = BIGGEST_ALIGNMENT;
5408 /* First get the mode, signedness, and size. We do this from just the
5409 outermost expression. */
5410 if (TREE_CODE (exp) == COMPONENT_REF)
5412 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5413 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5414 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5416 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5418 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5420 size_tree = TREE_OPERAND (exp, 1);
5421 *punsignedp = TREE_UNSIGNED (exp);
5425 mode = TYPE_MODE (TREE_TYPE (exp));
5426 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5428 if (mode == BLKmode)
5429 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5431 *pbitsize = GET_MODE_BITSIZE (mode);
5436 if (! host_integerp (size_tree, 1))
5437 mode = BLKmode, *pbitsize = -1;
5439 *pbitsize = tree_low_cst (size_tree, 1);
5442 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5443 and find the ultimate containing object. */
5446 if (TREE_CODE (exp) == BIT_FIELD_REF)
5447 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5448 else if (TREE_CODE (exp) == COMPONENT_REF)
5450 tree field = TREE_OPERAND (exp, 1);
5451 tree this_offset = DECL_FIELD_OFFSET (field);
5453 /* If this field hasn't been filled in yet, don't go
5454 past it. This should only happen when folding expressions
5455 made during type construction. */
5456 if (this_offset == 0)
5458 else if (! TREE_CONSTANT (this_offset)
5459 && contains_placeholder_p (this_offset))
5460 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5462 offset = size_binop (PLUS_EXPR, offset, this_offset);
5463 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5464 DECL_FIELD_BIT_OFFSET (field));
5466 if (! host_integerp (offset, 0))
5467 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
5470 else if (TREE_CODE (exp) == ARRAY_REF
5471 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5473 tree index = TREE_OPERAND (exp, 1);
5474 tree array = TREE_OPERAND (exp, 0);
5475 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5476 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5477 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5479 /* We assume all arrays have sizes that are a multiple of a byte.
5480 First subtract the lower bound, if any, in the type of the
5481 index, then convert to sizetype and multiply by the size of the
5482 element. */
5483 if (low_bound != 0 && ! integer_zerop (low_bound))
5484 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5487 /* If the index has a self-referential type, pass it to a
5488 WITH_RECORD_EXPR; if the component size is self-referential,
5489 pass our component to one. */
5490 if (! TREE_CONSTANT (index)
5491 && contains_placeholder_p (index))
5492 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5493 if (! TREE_CONSTANT (unit_size)
5494 && contains_placeholder_p (unit_size))
5495 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5497 offset = size_binop (PLUS_EXPR, offset,
5498 size_binop (MULT_EXPR,
5499 convert (sizetype, index),
5503 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5504 && ! ((TREE_CODE (exp) == NOP_EXPR
5505 || TREE_CODE (exp) == CONVERT_EXPR)
5506 && (TYPE_MODE (TREE_TYPE (exp))
5507 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5510 /* If any reference in the chain is volatile, the effect is volatile. */
5511 if (TREE_THIS_VOLATILE (exp))
5514 /* If the offset is non-constant already, then we can't assume any
5515 alignment more than the alignment here. */
5516 if (! TREE_CONSTANT (offset))
5517 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5519 exp = TREE_OPERAND (exp, 0);
5523 alignment = MIN (alignment, DECL_ALIGN (exp));
5524 else if (TREE_TYPE (exp) != 0)
5525 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5527 /* If OFFSET is constant, see if we can return the whole thing as a
5528 constant bit position. Otherwise, split it up. */
5529 if (host_integerp (offset, 0)
5530 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5532 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5533 && host_integerp (tem, 0))
5534 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5536 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5539 *palignment = alignment;
5543 /* Subroutine of expand_expr: compute memory_usage from modifier. */
5545 static enum memory_use_mode
5546 get_memory_usage_from_modifier (modifier)
5547 enum expand_modifier modifier;
5553 return MEMORY_USE_RO;
5555 case EXPAND_MEMORY_USE_WO:
5556 return MEMORY_USE_WO;
5558 case EXPAND_MEMORY_USE_RW:
5559 return MEMORY_USE_RW;
5561 case EXPAND_MEMORY_USE_DONT:
5562 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5563 MEMORY_USE_DONT, because they are modifiers to a call of
5564 expand_expr in the ADDR_EXPR case of expand_expr. */
5565 case EXPAND_CONST_ADDRESS:
5566 case EXPAND_INITIALIZER:
5567 return MEMORY_USE_DONT;
5568 case EXPAND_MEMORY_USE_BAD:
5574 /* Given an rtx VALUE that may contain additions and multiplications, return
5575 an equivalent value that just refers to a register, memory, or constant.
5576 This is done by generating instructions to perform the arithmetic and
5577 returning a pseudo-register containing the value.
5579 The returned value may be a REG, SUBREG, MEM or constant. */
5582 force_operand (value, target)
5586 /* Use a temporary to force order of execution of calls to
5587 `force_operand'. */
5590 /* Use subtarget as the target for operand 0 of a binary operation. */
5591 rtx subtarget = get_subtarget (target);
5593 /* Check for a PIC address load. */
5595 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5596 && XEXP (value, 0) == pic_offset_table_rtx
5597 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5598 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5599 || GET_CODE (XEXP (value, 1)) == CONST))
5602 subtarget = gen_reg_rtx (GET_MODE (value));
5603 emit_move_insn (subtarget, value);
5607 if (GET_CODE (value) == PLUS)
5608 binoptab = add_optab;
5609 else if (GET_CODE (value) == MINUS)
5610 binoptab = sub_optab;
5611 else if (GET_CODE (value) == MULT)
5613 op2 = XEXP (value, 1);
5614 if (!CONSTANT_P (op2)
5615 && !(GET_CODE (op2) == REG && op2 != subtarget))
5617 tmp = force_operand (XEXP (value, 0), subtarget);
5618 return expand_mult (GET_MODE (value), tmp,
5619 force_operand (op2, NULL_RTX),
5625 op2 = XEXP (value, 1);
5626 if (!CONSTANT_P (op2)
5627 && !(GET_CODE (op2) == REG && op2 != subtarget))
5629 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5631 binoptab = add_optab;
5632 op2 = negate_rtx (GET_MODE (value), op2);
5635 /* Check for an addition with OP2 a constant integer and our first
5636 operand a PLUS of a virtual register and something else. In that
5637 case, we want to emit the sum of the virtual register and the
5638 constant first and then add the other value. This allows virtual
5639 register instantiation to simply modify the constant rather than
5640 creating another one around this addition. */
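/* E.g. for (plus (plus (reg virtual-stack-vars) (reg 117)) (const_int 8))
   we first form virtual-stack-vars + 8, which instantiation can later
   rewrite as a single frame-pointer offset, and only then add reg 117;
   done the other way around, the constant would survive as a separate
   add instruction. */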
5641 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5642 && GET_CODE (XEXP (value, 0)) == PLUS
5643 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5644 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5645 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5647 rtx temp = expand_binop (GET_MODE (value), binoptab,
5648 XEXP (XEXP (value, 0), 0), op2,
5649 subtarget, 0, OPTAB_LIB_WIDEN);
5650 return expand_binop (GET_MODE (value), binoptab, temp,
5651 force_operand (XEXP (XEXP (value, 0), 1), 0),
5652 target, 0, OPTAB_LIB_WIDEN);
5655 tmp = force_operand (XEXP (value, 0), subtarget);
5656 return expand_binop (GET_MODE (value), binoptab, tmp,
5657 force_operand (op2, NULL_RTX),
5658 target, 0, OPTAB_LIB_WIDEN);
5659 /* We give UNSIGNEDP = 0 to expand_binop
5660 because the only operations we are expanding here are signed ones. */
5665 /* Subroutine of expand_expr: return nonzero iff there is no way that
5666 EXP can reference X, which is being modified. TOP_P is nonzero if this
5667 call is going to be used to determine whether we need a temporary
5668 for EXP, as opposed to a recursive call to this function.
5670 It is always safe for this routine to return zero since it merely
5671 searches for optimization opportunities. */
5674 safe_from_p (x, exp, top_p)
5681 static tree save_expr_list;
5684 /* If EXP has varying size, we MUST use a target since we currently
5685 have no way of allocating temporaries of variable size
5686 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5687 So we assume here that something at a higher level has prevented a
5688 clash. This is somewhat bogus, but the best we can do. Only
5689 do this when X is BLKmode and when we are at the top level. */
5690 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5691 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5692 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5693 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5694 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5696 && GET_MODE (x) == BLKmode)
5697 /* If X is in the outgoing argument area, it is always safe. */
5698 || (GET_CODE (x) == MEM
5699 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5700 || (GET_CODE (XEXP (x, 0)) == PLUS
5701 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5704 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5705 find the underlying pseudo. */
5706 if (GET_CODE (x) == SUBREG)
5709 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5713 /* A SAVE_EXPR might appear many times in the expression passed to the
5714 top-level safe_from_p call, and if it has a complex subexpression,
5715 examining it multiple times could result in a combinatorial explosion.
5716 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5717 with optimization took about 28 minutes to compile -- even though it was
5718 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5719 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5720 we have processed. Note that the only test of top_p was above. */
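/* In effect TREE_PRIVATE turns the recursive scan into a visited-set
   walk: a SAVE_EXPR shared by several operands at each of several
   nesting levels is examined once rather than a number of times that
   grows exponentially with the nesting depth. */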
5729 rtn = safe_from_p (x, exp, 0);
5731 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5732 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5737 /* Now look at our tree code and possibly recurse. */
5738 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5741 exp_rtl = DECL_RTL_SET_P (exp) ? DECL_RTL (exp) : NULL_RTX;
5748 if (TREE_CODE (exp) == TREE_LIST)
5749 return ((TREE_VALUE (exp) == 0
5750 || safe_from_p (x, TREE_VALUE (exp), 0))
5751 && (TREE_CHAIN (exp) == 0
5752 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5753 else if (TREE_CODE (exp) == ERROR_MARK)
5754 return 1; /* An already-visited SAVE_EXPR? */
5759 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5763 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5764 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5768 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5769 the expression. If it is set, we conflict iff we are that rtx or
5770 both are in memory. Otherwise, we check all operands of the
5771 expression recursively. */
5773 switch (TREE_CODE (exp))
5776 return (staticp (TREE_OPERAND (exp, 0))
5777 || TREE_STATIC (exp)
5778 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
5781 if (GET_CODE (x) == MEM
5782 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5783 get_alias_set (exp)))
5788 /* Assume that the call will clobber all hard registers and
5789 all of memory. */
5790 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5791 || GET_CODE (x) == MEM)
5796 /* If a sequence exists, we would have to scan every instruction
5797 in the sequence to see if it was safe. This is probably not
5798 worthwhile. */
5799 if (RTL_EXPR_SEQUENCE (exp))
5802 exp_rtl = RTL_EXPR_RTL (exp);
5805 case WITH_CLEANUP_EXPR:
5806 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5809 case CLEANUP_POINT_EXPR:
5810 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5813 exp_rtl = SAVE_EXPR_RTL (exp);
5817 /* If we've already scanned this, don't do it again. Otherwise,
5818 show we've scanned it and record for clearing the flag if we're
5820 if (TREE_PRIVATE (exp))
5823 TREE_PRIVATE (exp) = 1;
5824 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5826 TREE_PRIVATE (exp) = 0;
5830 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5834 /* The only operand we look at is operand 1. The rest aren't
5835 part of the expression. */
5836 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5838 case METHOD_CALL_EXPR:
5839 /* This takes an rtx argument, but shouldn't appear here. */
5846 /* If we have an rtx, we do not need to scan our operands. */
5850 nops = first_rtl_op (TREE_CODE (exp));
5851 for (i = 0; i < nops; i++)
5852 if (TREE_OPERAND (exp, i) != 0
5853 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5856 /* If this is a language-specific tree code, it may require
5857 special handling. */
5858 if ((unsigned int) TREE_CODE (exp)
5859 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5861 && !(*lang_safe_from_p) (x, exp))
5865 /* If we have an rtl, find any enclosed object. Then see if we conflict
5866 with it. */
5869 if (GET_CODE (exp_rtl) == SUBREG)
5871 exp_rtl = SUBREG_REG (exp_rtl);
5872 if (GET_CODE (exp_rtl) == REG
5873 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5877 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5878 are memory and they conflict. */
5879 return ! (rtx_equal_p (x, exp_rtl)
5880 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5881 && true_dependence (exp_rtl, GET_MODE (x), x,
5882 rtx_addr_varies_p)));
5885 /* If we reach here, it is safe. */
5889 /* Subroutine of expand_expr: return rtx if EXP is a
5890 variable or parameter; else return 0. */
5897 switch (TREE_CODE (exp))
5901 return DECL_RTL (exp);
5907 #ifdef MAX_INTEGER_COMPUTATION_MODE
5910 check_max_integer_computation_mode (exp)
5913 enum tree_code code;
5914 enum machine_mode mode;
5916 /* Strip any NOPs that don't change the mode. */
5918 code = TREE_CODE (exp);
5920 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5921 if (code == NOP_EXPR
5922 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5925 /* First check the type of the overall operation. We need only look at
5926 unary, binary and relational operations. */
5927 if (TREE_CODE_CLASS (code) == '1'
5928 || TREE_CODE_CLASS (code) == '2'
5929 || TREE_CODE_CLASS (code) == '<')
5931 mode = TYPE_MODE (TREE_TYPE (exp));
5932 if (GET_MODE_CLASS (mode) == MODE_INT
5933 && mode > MAX_INTEGER_COMPUTATION_MODE)
5934 internal_error ("unsupported wide integer operation");
5937 /* Check operand of a unary op. */
5938 if (TREE_CODE_CLASS (code) == '1')
5940 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5941 if (GET_MODE_CLASS (mode) == MODE_INT
5942 && mode > MAX_INTEGER_COMPUTATION_MODE)
5943 internal_error ("unsupported wide integer operation");
5946 /* Check operands of a binary/comparison op. */
5947 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5949 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5950 if (GET_MODE_CLASS (mode) == MODE_INT
5951 && mode > MAX_INTEGER_COMPUTATION_MODE)
5952 internal_error ("unsupported wide integer operation");
5954 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5955 if (GET_MODE_CLASS (mode) == MODE_INT
5956 && mode > MAX_INTEGER_COMPUTATION_MODE)
5957 internal_error ("unsupported wide integer operation");
5962 /* Return an object on the placeholder list that matches EXP, a
5963 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
5964 PLACEHOLDER_EXPR or a pointer type to it. For further information,
5965 see tree.def. If no such object is found, abort. If PLIST is nonzero,
5966 it is a location in which to store a pointer to the placeholder-list
5967 entry at which the object was found. */
5970 find_placeholder (exp, plist)
5974 tree type = TREE_TYPE (exp);
5975 tree placeholder_expr;
5977 for (placeholder_expr = placeholder_list; placeholder_expr != 0;
5978 placeholder_expr = TREE_CHAIN (placeholder_expr))
5980 tree need_type = TYPE_MAIN_VARIANT (type);
5983 /* Find the outermost reference that is of the type we want. If none,
5984 see if any object has a type that is a pointer to the type we
5985 want. */
5986 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
5987 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5988 || TREE_CODE (elt) == COND_EXPR)
5989 ? TREE_OPERAND (elt, 1)
5990 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5991 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5992 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5993 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5994 ? TREE_OPERAND (elt, 0) : 0))
5995 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5998 *plist = placeholder_expr;
6002 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6004 = ((TREE_CODE (elt) == COMPOUND_EXPR
6005 || TREE_CODE (elt) == COND_EXPR)
6006 ? TREE_OPERAND (elt, 1)
6007 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6008 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6009 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6010 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6011 ? TREE_OPERAND (elt, 0) : 0))
6012 if (POINTER_TYPE_P (TREE_TYPE (elt))
6013 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6017 *plist = placeholder_expr;
6018 return build1 (INDIRECT_REF, need_type, elt);
6025 /* expand_expr: generate code for computing expression EXP.
6026 An rtx for the computed value is returned. The value is never null.
6027 In the case of a void EXP, const0_rtx is returned.
6029 The value may be stored in TARGET if TARGET is nonzero.
6030 TARGET is just a suggestion; callers must assume that
6031 the rtx returned may not be the same as TARGET.
6033 If TARGET is CONST0_RTX, it means that the value will be ignored.
6035 If TMODE is not VOIDmode, it suggests generating the
6036 result in mode TMODE. But this is done only when convenient.
6037 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6038 TMODE is just a suggestion; callers must assume that
6039 the rtx returned may not have mode TMODE.
6041 Note that TARGET may have neither TMODE nor MODE. In that case, it
6042 probably will not be used.
6044 If MODIFIER is EXPAND_SUM then when EXP is an addition
6045 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6046 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6047 products as above, or REG or MEM, or constant.
6048 Ordinarily in such cases we would output mul or add instructions
6049 and then return a pseudo reg containing the sum.
6051 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6052 it also marks a label as absolutely required (it can't be dead).
6053 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6054 This is used for outputting expressions used in initializers.
6056 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6057 with a constant address even if that address is not normally legitimate.
6058 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
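/* For instance, under EXPAND_SUM the expression `x + y * 4' may come
   back as

     (plus:SI (reg/v:SI 58) (mult:SI (reg/v:SI 59) (const_int 4)))

   which a caller that is computing an address can hand to
   memory_address instead of forcing the sum into a fresh pseudo. */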
6061 expand_expr (exp, target, tmode, modifier)
6064 enum machine_mode tmode;
6065 enum expand_modifier modifier;
6068 tree type = TREE_TYPE (exp);
6069 int unsignedp = TREE_UNSIGNED (type);
6070 enum machine_mode mode;
6071 enum tree_code code = TREE_CODE (exp);
6073 rtx subtarget, original_target;
6076 /* Used by check-memory-usage to make modifier read only. */
6077 enum expand_modifier ro_modifier;
6079 /* Handle ERROR_MARK before anybody tries to access its type. */
6080 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6082 op0 = CONST0_RTX (tmode);
6088 mode = TYPE_MODE (type);
6089 /* Use subtarget as the target for operand 0 of a binary operation. */
6090 subtarget = get_subtarget (target);
6091 original_target = target;
6092 ignore = (target == const0_rtx
6093 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6094 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6095 || code == COND_EXPR)
6096 && TREE_CODE (type) == VOID_TYPE));
6098 /* Make a read-only version of the modifier. */
6099 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
6100 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
6101 ro_modifier = modifier;
6103 ro_modifier = EXPAND_NORMAL;
6105 /* If we are going to ignore this result, we need only do something
6106 if there is a side-effect somewhere in the expression. If there
6107 is, short-circuit the most common cases here. Note that we must
6108 not call expand_expr with anything but const0_rtx in case this
6109 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6113 if (! TREE_SIDE_EFFECTS (exp))
6116 /* Ensure we reference a volatile object even if value is ignored, but
6117 don't do this if all we are doing is taking its address. */
6118 if (TREE_THIS_VOLATILE (exp)
6119 && TREE_CODE (exp) != FUNCTION_DECL
6120 && mode != VOIDmode && mode != BLKmode
6121 && modifier != EXPAND_CONST_ADDRESS)
6123 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
6124 if (GET_CODE (temp) == MEM)
6125 temp = copy_to_reg (temp);
6129 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6130 || code == INDIRECT_REF || code == BUFFER_REF)
6131 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6132 VOIDmode, ro_modifier);
6133 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6134 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6136 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6138 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6142 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6143 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6144 /* If the second operand has no side effects, just evaluate
6145 the first. */
6146 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
6147 VOIDmode, ro_modifier);
6148 else if (code == BIT_FIELD_REF)
6150 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6152 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode,
6154 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode,
6162 #ifdef MAX_INTEGER_COMPUTATION_MODE
6163 /* Only check stuff here if the mode we want is different from the mode
6164 of the expression; if it's the same, check_max_integer_computation_mode
6165 will handle it. Do we really need to check this stuff at all? */
6168 && GET_MODE (target) != mode
6169 && TREE_CODE (exp) != INTEGER_CST
6170 && TREE_CODE (exp) != PARM_DECL
6171 && TREE_CODE (exp) != ARRAY_REF
6172 && TREE_CODE (exp) != ARRAY_RANGE_REF
6173 && TREE_CODE (exp) != COMPONENT_REF
6174 && TREE_CODE (exp) != BIT_FIELD_REF
6175 && TREE_CODE (exp) != INDIRECT_REF
6176 && TREE_CODE (exp) != CALL_EXPR
6177 && TREE_CODE (exp) != VAR_DECL
6178 && TREE_CODE (exp) != RTL_EXPR)
6180 enum machine_mode mode = GET_MODE (target);
6182 if (GET_MODE_CLASS (mode) == MODE_INT
6183 && mode > MAX_INTEGER_COMPUTATION_MODE)
6184 internal_error ("unsupported wide integer operation");
6188 && TREE_CODE (exp) != INTEGER_CST
6189 && TREE_CODE (exp) != PARM_DECL
6190 && TREE_CODE (exp) != ARRAY_REF
6191 && TREE_CODE (exp) != ARRAY_RANGE_REF
6192 && TREE_CODE (exp) != COMPONENT_REF
6193 && TREE_CODE (exp) != BIT_FIELD_REF
6194 && TREE_CODE (exp) != INDIRECT_REF
6195 && TREE_CODE (exp) != VAR_DECL
6196 && TREE_CODE (exp) != CALL_EXPR
6197 && TREE_CODE (exp) != RTL_EXPR
6198 && GET_MODE_CLASS (tmode) == MODE_INT
6199 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6200 internal_error ("unsupported wide integer operation");
6202 check_max_integer_computation_mode (exp);
6205 /* If we will do cse, generate all results into pseudo registers
6206 since 1) that allows cse to find more things
6207 and 2) otherwise cse could produce an insn the machine
6208 cannot support. */
6210 if (! cse_not_expected && mode != BLKmode && target
6211 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6218 tree function = decl_function_context (exp);
6219 /* Handle using a label in a containing function. */
6220 if (function != current_function_decl
6221 && function != inline_function_decl && function != 0)
6223 struct function *p = find_function_data (function);
6224 p->expr->x_forced_labels
6225 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6226 p->expr->x_forced_labels);
6230 if (modifier == EXPAND_INITIALIZER)
6231 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6236 temp = gen_rtx_MEM (FUNCTION_MODE,
6237 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6238 if (function != current_function_decl
6239 && function != inline_function_decl && function != 0)
6240 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6245 if (DECL_RTL (exp) == 0)
6247 error_with_decl (exp, "prior parameter's size depends on `%s'");
6248 return CONST0_RTX (mode);
6251 /* ... fall through ... */
6254 /* If a static var's type was incomplete when the decl was written,
6255 but the type is complete now, lay out the decl now. */
6256 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6257 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6259 layout_decl (exp, 0);
6260 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
6263 /* Although static-storage variables start off initialized, according to
6264 ANSI C, a memcpy could overwrite them with uninitialized values. So
6265 we check them too. This also lets us check for read-only variables
6266 accessed via a non-const declaration, in case it won't be detected
6267 any other way (e.g., in an embedded system or OS kernel without memory protection).
6270 Aggregates are not checked here; they're handled elsewhere. */
6271 if (cfun && current_function_check_memory_usage
6273 && GET_CODE (DECL_RTL (exp)) == MEM
6274 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6276 enum memory_use_mode memory_usage;
6277 memory_usage = get_memory_usage_from_modifier (modifier);
6279 in_check_memory_usage = 1;
6280 if (memory_usage != MEMORY_USE_DONT)
6281 emit_library_call (chkr_check_addr_libfunc,
6282 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6283 XEXP (DECL_RTL (exp), 0), Pmode,
6284 GEN_INT (int_size_in_bytes (type)),
6285 TYPE_MODE (sizetype),
6286 GEN_INT (memory_usage),
6287 TYPE_MODE (integer_type_node));
6288 in_check_memory_usage = 0;
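/* Hedged sketch of what the emitted library call amounts to at run
   time, assuming the Checker runtime entry point named by the
   libfunc above:

       chkr_check_addr (&var, sizeof var, memory_usage);

   i.e. the object's address, its size in bytes, and the kind of
   access being validated. */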
6291 /* ... fall through ... */
6295 if (DECL_RTL (exp) == 0)
6298 /* Ensure variable marked as used even if it doesn't go through
6299 a parser. If it hasn't been used yet, write out an external definition. */
6301 if (! TREE_USED (exp))
6303 assemble_external (exp);
6304 TREE_USED (exp) = 1;
6307 /* Show we haven't gotten RTL for this yet. */
6310 /* Handle variables inherited from containing functions. */
6311 context = decl_function_context (exp);
6313 /* We treat inline_function_decl as an alias for the current function
6314 because that is the inline function whose vars, types, etc.
6315 are being merged into the current function.
6316 See expand_inline_function. */
6318 if (context != 0 && context != current_function_decl
6319 && context != inline_function_decl
6320 /* If var is static, we don't need a static chain to access it. */
6321 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6322 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6326 /* Mark as non-local and addressable. */
6327 DECL_NONLOCAL (exp) = 1;
6328 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6330 mark_addressable (exp);
6331 if (GET_CODE (DECL_RTL (exp)) != MEM)
6333 addr = XEXP (DECL_RTL (exp), 0);
6334 if (GET_CODE (addr) == MEM)
6336 = replace_equiv_address (addr,
6337 fix_lexical_addr (XEXP (addr, 0), exp));
6339 addr = fix_lexical_addr (addr, exp);
6341 temp = replace_equiv_address (DECL_RTL (exp), addr);
6344 /* This is the case of an array whose size is to be determined
6345 from its initializer, while the initializer is still being parsed.
6348 else if (GET_CODE (DECL_RTL (exp)) == MEM
6349 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6350 temp = validize_mem (DECL_RTL (exp));
6352 /* If DECL_RTL is memory, we are in the normal case and either
6353 the address is not valid or it is not a register and -fforce-addr
6354 is specified, get the address into a register. */
6356 else if (GET_CODE (DECL_RTL (exp)) == MEM
6357 && modifier != EXPAND_CONST_ADDRESS
6358 && modifier != EXPAND_SUM
6359 && modifier != EXPAND_INITIALIZER
6360 && (! memory_address_p (DECL_MODE (exp),
6361 XEXP (DECL_RTL (exp), 0))
6363 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6364 temp = replace_equiv_address (DECL_RTL (exp),
6365 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6367 /* If we got something, return it. But first, set the alignment
6368 if the address is a register. */
6371 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6372 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6377 /* If the mode of DECL_RTL does not match that of the decl, it
6378 must be a promoted value. We return a SUBREG of the wanted mode,
6379 but mark it so that we know that it was already extended. */
6381 if (GET_CODE (DECL_RTL (exp)) == REG
6382 && GET_MODE (DECL_RTL (exp)) != mode)
6384 /* Get the signedness used for this variable. Ensure we get the
6385 same mode we got when the variable was declared. */
6386 if (GET_MODE (DECL_RTL (exp))
6387 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
6390 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6391 SUBREG_PROMOTED_VAR_P (temp) = 1;
6392 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6396 return DECL_RTL (exp);
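/* Illustrative, target-dependent example: on a machine whose
   PROMOTE_MODE widens HImode to SImode, a `short' variable is kept
   in an SImode pseudo, so asking for its HImode value yields a
   lowpart

       (subreg:HI (reg:SI n))

   with SUBREG_PROMOTED_VAR_P set; later consumers then know the
   high part already holds a valid extension and need not redo it. */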
6399 return immed_double_const (TREE_INT_CST_LOW (exp),
6400 TREE_INT_CST_HIGH (exp), mode);
6403 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6404 EXPAND_MEMORY_USE_BAD);
6407 /* If optimized, generate immediate CONST_DOUBLE
6408 which will be turned into memory by reload if necessary.
6410 We used to force a register so that loop.c could see it. But
6411 this does not allow gen_* patterns to perform optimizations with
6412 the constants. It also produces two insns in cases like "x = 1.0;".
6413 On most machines, floating-point constants are not permitted in
6414 many insns, so we'd end up copying it to a register in any case.
6416 Now, we do the copying in expand_binop, if appropriate. */
6417 return immed_real_const (exp);
6421 if (! TREE_CST_RTL (exp))
6422 output_constant_def (exp, 1);
6424 /* TREE_CST_RTL probably contains a constant address.
6425 On RISC machines where a constant address isn't valid,
6426 make some insns to get that address into a register. */
6427 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6428 && modifier != EXPAND_CONST_ADDRESS
6429 && modifier != EXPAND_INITIALIZER
6430 && modifier != EXPAND_SUM
6431 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6433 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6434 return replace_equiv_address (TREE_CST_RTL (exp),
6435 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6436 return TREE_CST_RTL (exp);
6438 case EXPR_WITH_FILE_LOCATION:
6441 const char *saved_input_filename = input_filename;
6442 int saved_lineno = lineno;
6443 input_filename = EXPR_WFL_FILENAME (exp);
6444 lineno = EXPR_WFL_LINENO (exp);
6445 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6446 emit_line_note (input_filename, lineno);
6447 /* Possibly avoid switching back and forth here. */
6448 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6449 input_filename = saved_input_filename;
6450 lineno = saved_lineno;
6455 context = decl_function_context (exp);
6457 /* If this SAVE_EXPR was at global context, assume we are an
6458 initialization function and move it into our context. */
6460 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6462 /* We treat inline_function_decl as an alias for the current function
6463 because that is the inline function whose vars, types, etc.
6464 are being merged into the current function.
6465 See expand_inline_function. */
6466 if (context == current_function_decl || context == inline_function_decl)
6469 /* If this is non-local, handle it. */
6472 /* The following call just exists to abort if the context is
6473 not of a containing function. */
6474 find_function_data (context);
6476 temp = SAVE_EXPR_RTL (exp);
6477 if (temp && GET_CODE (temp) == REG)
6479 put_var_into_stack (exp);
6480 temp = SAVE_EXPR_RTL (exp);
6482 if (temp == 0 || GET_CODE (temp) != MEM)
6485 replace_equiv_address (temp,
6486 fix_lexical_addr (XEXP (temp, 0), exp));
6488 if (SAVE_EXPR_RTL (exp) == 0)
6490 if (mode == VOIDmode)
6493 temp = assign_temp (build_qualified_type (type,
6495 | TYPE_QUAL_CONST)),
6498 SAVE_EXPR_RTL (exp) = temp;
6499 if (!optimize && GET_CODE (temp) == REG)
6500 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6503 /* If the mode of TEMP does not match that of the expression, it
6504 must be a promoted value. We pass store_expr a SUBREG of the
6505 wanted mode but mark it so that we know that it was already
6506 extended. Note that `unsignedp' was modified above in this case. */
6509 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6511 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6512 SUBREG_PROMOTED_VAR_P (temp) = 1;
6513 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6516 if (temp == const0_rtx)
6517 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6518 EXPAND_MEMORY_USE_BAD);
6520 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6522 TREE_USED (exp) = 1;
6525 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6526 must be a promoted value. We return a SUBREG of the wanted mode,
6527 but mark it so that we know that it was already extended. */
6529 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6530 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6532 /* Compute the signedness and make the proper SUBREG. */
6533 promote_mode (type, mode, &unsignedp, 0);
6534 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6535 SUBREG_PROMOTED_VAR_P (temp) = 1;
6536 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6540 return SAVE_EXPR_RTL (exp);
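/* Illustrative note: SAVE_EXPR exists so its operand is evaluated
   exactly once. If a tree such as SAVE_EXPR <f ()> is expanded
   twice, the first expansion stores the call's value into the
   temporary assigned above (SAVE_EXPR_RTL); the second expansion
   simply returns that RTL instead of calling f again. */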
6545 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6546 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6550 case PLACEHOLDER_EXPR:
6552 tree old_list = placeholder_list;
6553 tree placeholder_expr;
6555 exp = find_placeholder (exp, &placeholder_expr);
6556 placeholder_list = TREE_CHAIN (placeholder_expr);
6557 temp = expand_expr (exp, original_target, tmode, ro_modifier);
6558 placeholder_list = old_list;
6562 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6565 case WITH_RECORD_EXPR:
6566 /* Put the object on the placeholder list, expand our first operand,
6567 and pop the list. */
6568 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6570 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
6571 tmode, ro_modifier);
6572 placeholder_list = TREE_CHAIN (placeholder_list);
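/* Hedged illustration (typical of Ada-style variable-sized records):
   a tree shaped like

       WITH_RECORD_EXPR <COMPONENT_REF <PLACEHOLDER_EXPR, field>, rec>

   expands its COMPONENT_REF while `rec' sits on placeholder_list,
   so the inner PLACEHOLDER_EXPR resolves to that particular record
   object. */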
6576 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6577 expand_goto (TREE_OPERAND (exp, 0));
6579 expand_computed_goto (TREE_OPERAND (exp, 0));
6583 expand_exit_loop_if_false (NULL,
6584 invert_truthvalue (TREE_OPERAND (exp, 0)));
6587 case LABELED_BLOCK_EXPR:
6588 if (LABELED_BLOCK_BODY (exp))
6589 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6590 /* Should perhaps use expand_label, but this is simpler and safer. */
6591 do_pending_stack_adjust ();
6592 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6595 case EXIT_BLOCK_EXPR:
6596 if (EXIT_BLOCK_RETURN (exp))
6597 sorry ("returned value in block_exit_expr");
6598 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6603 expand_start_loop (1);
6604 expand_expr_stmt (TREE_OPERAND (exp, 0));
6612 tree vars = TREE_OPERAND (exp, 0);
6613 int vars_need_expansion = 0;
6615 /* Need to open a binding contour here because
6616 if there are any cleanups they must be contained here. */
6617 expand_start_bindings (2);
6619 /* Mark the corresponding BLOCK for output in its proper place. */
6620 if (TREE_OPERAND (exp, 2) != 0
6621 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6622 insert_block (TREE_OPERAND (exp, 2));
6624 /* If VARS have not yet been expanded, expand them now. */
6627 if (!DECL_RTL_SET_P (vars))
6629 vars_need_expansion = 1;
6632 expand_decl_init (vars);
6633 vars = TREE_CHAIN (vars);
6636 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
6638 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6644 if (RTL_EXPR_SEQUENCE (exp))
6646 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6648 emit_insns (RTL_EXPR_SEQUENCE (exp));
6649 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6651 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6652 free_temps_for_rtl_expr (exp);
6653 return RTL_EXPR_RTL (exp);
6656 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6661 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6662 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6663 EXPAND_MEMORY_USE_BAD);
6667 /* All elts simple constants => refer to a constant in memory. But
6668 if this is a non-BLKmode mode, let it store a field at a time
6669 since that should make a CONST_INT or CONST_DOUBLE when we
6670 fold. Likewise, if we have a target we can use, it is best to
6671 store directly into the target unless the type is large enough
6672 that memcpy will be used. If we are making an initializer and
6673 all operands are constant, put it in memory as well. */
6674 else if ((TREE_STATIC (exp)
6675 && ((mode == BLKmode
6676 && ! (target != 0 && safe_from_p (target, exp, 1)))
6677 || TREE_ADDRESSABLE (exp)
6678 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6679 && (! MOVE_BY_PIECES_P
6680 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6682 && ! mostly_zeros_p (exp))))
6683 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6685 rtx constructor = output_constant_def (exp, 1);
6687 if (modifier != EXPAND_CONST_ADDRESS
6688 && modifier != EXPAND_INITIALIZER
6689 && modifier != EXPAND_SUM)
6690 constructor = validize_mem (constructor);
6696 /* Handle calls that pass values in multiple non-contiguous
6697 locations. The Irix 6 ABI has examples of this. */
6698 if (target == 0 || ! safe_from_p (target, exp, 1)
6699 || GET_CODE (target) == PARALLEL)
6701 = assign_temp (build_qualified_type (type,
6703 | (TREE_READONLY (exp)
6704 * TYPE_QUAL_CONST))),
6705 TREE_ADDRESSABLE (exp), 1, 1);
6707 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6708 int_size_in_bytes (TREE_TYPE (exp)));
6714 tree exp1 = TREE_OPERAND (exp, 0);
6716 tree string = string_constant (exp1, &index);
6718 /* Try to optimize reads from const strings. */
6720 && TREE_CODE (string) == STRING_CST
6721 && TREE_CODE (index) == INTEGER_CST
6722 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6723 && GET_MODE_CLASS (mode) == MODE_INT
6724 && GET_MODE_SIZE (mode) == 1
6725 && modifier != EXPAND_MEMORY_USE_WO)
6727 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
6729 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6730 op0 = memory_address (mode, op0);
6732 if (cfun && current_function_check_memory_usage
6733 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6735 enum memory_use_mode memory_usage;
6736 memory_usage = get_memory_usage_from_modifier (modifier);
6738 if (memory_usage != MEMORY_USE_DONT)
6740 in_check_memory_usage = 1;
6741 emit_library_call (chkr_check_addr_libfunc,
6742 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6743 Pmode, GEN_INT (int_size_in_bytes (type)),
6744 TYPE_MODE (sizetype),
6745 GEN_INT (memory_usage),
6746 TYPE_MODE (integer_type_node));
6747 in_check_memory_usage = 0;
6751 temp = gen_rtx_MEM (mode, op0);
6752 set_mem_attributes (temp, exp, 0);
6754 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6755 here, because, in C and C++, the fact that a location is accessed
6756 through a pointer to const does not mean that the value there can
6757 never change. Languages where it can never change should
6758 also set TREE_STATIC. */
6759 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6761 /* If we are writing to this object and its type is a record with
6762 readonly fields, we must mark it as readonly so it will
6763 conflict with readonly references to those fields. */
6764 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
6765 RTX_UNCHANGING_P (temp) = 1;
6771 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6775 tree array = TREE_OPERAND (exp, 0);
6776 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6777 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6778 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6781 /* Optimize the special-case of a zero lower bound.
6783 We convert the low_bound to sizetype to avoid some problems
6784 with constant folding. (E.g. suppose the lower bound is 1,
6785 and its mode is QI. Without the conversion, (ARRAY
6786 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6787 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6789 if (! integer_zerop (low_bound))
6790 index = size_diffop (index, convert (sizetype, low_bound));
6792 /* Fold an expression like: "foo"[2].
6793 This is not done in fold so it won't happen inside &.
6794 Don't fold if this is for wide characters since it's too
6795 difficult to do correctly and this is a very rare case. */
6797 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6798 && TREE_CODE (array) == STRING_CST
6799 && TREE_CODE (index) == INTEGER_CST
6800 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6801 && GET_MODE_CLASS (mode) == MODE_INT
6802 && GET_MODE_SIZE (mode) == 1)
6804 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
6806 /* If this is a constant index into a constant array,
6807 just get the value from the array. Handle both the cases when
6808 we have an explicit constructor and when our operand is a variable
6809 that was declared const. */
6811 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
6812 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6813 && TREE_CODE (index) == INTEGER_CST
6814 && 0 > compare_tree_int (index,
6815 list_length (CONSTRUCTOR_ELTS
6816 (TREE_OPERAND (exp, 0)))))
6820 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6821 i = TREE_INT_CST_LOW (index);
6822 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6826 return expand_expr (fold (TREE_VALUE (elem)), target,
6827 tmode, ro_modifier);
6830 else if (optimize >= 1
6831 && modifier != EXPAND_CONST_ADDRESS
6832 && modifier != EXPAND_INITIALIZER
6833 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6834 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6835 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6837 if (TREE_CODE (index) == INTEGER_CST)
6839 tree init = DECL_INITIAL (array);
6841 if (TREE_CODE (init) == CONSTRUCTOR)
6845 for (elem = CONSTRUCTOR_ELTS (init);
6847 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6848 elem = TREE_CHAIN (elem))
6851 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6852 return expand_expr (fold (TREE_VALUE (elem)), target,
6853 tmode, ro_modifier);
6855 else if (TREE_CODE (init) == STRING_CST
6856 && 0 > compare_tree_int (index,
6857 TREE_STRING_LENGTH (init)))
6859 tree type = TREE_TYPE (TREE_TYPE (init));
6860 enum machine_mode mode = TYPE_MODE (type);
6862 if (GET_MODE_CLASS (mode) == MODE_INT
6863 && GET_MODE_SIZE (mode) == 1)
6865 (TREE_STRING_POINTER
6866 (init)[TREE_INT_CST_LOW (index)]));
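/* Illustrative example: when optimizing, a reference such as

       static const int map[3] = { 10, 20, 30 };
       ... map[1] ...

   is resolved through DECL_INITIAL by the walk above, so the
   constant 20 is expanded directly and no memory load is emitted. */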
6875 case ARRAY_RANGE_REF:
6876 /* If the operand is a CONSTRUCTOR, we can just extract the
6877 appropriate field if it is present. Don't do this if we have
6878 already written the data since we want to refer to that copy
6879 and varasm.c assumes that's what we'll do. */
6880 if (code == COMPONENT_REF
6881 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6882 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6886 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6887 elt = TREE_CHAIN (elt))
6888 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6889 /* We can normally use the value of the field in the
6890 CONSTRUCTOR. However, if this is a bitfield in
6891 an integral mode that we can fit in a HOST_WIDE_INT,
6892 we must mask only the number of bits in the bitfield,
6893 since this is done implicitly by the constructor. If
6894 the bitfield does not meet either of those conditions,
6895 we can't do this optimization. */
6896 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6897 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6899 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6900 <= HOST_BITS_PER_WIDE_INT))))
6902 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6903 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6905 HOST_WIDE_INT bitsize
6906 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6908 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6910 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6911 op0 = expand_and (op0, op1, target);
6915 enum machine_mode imode
6916 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6918 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6921 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6923 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
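/* Worked example (illustrative): reading a signed 3-bit field out
   of an 8-bit mode value uses the two shifts above,

       (op0 << (8 - 3)) >> (8 - 3)   with an arithmetic right shift,

   so a stored bit pattern of 101 reads back as -3 rather than 5. */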
6933 enum machine_mode mode1;
6934 HOST_WIDE_INT bitsize, bitpos;
6937 unsigned int alignment;
6938 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6939 &mode1, &unsignedp, &volatilep,
6943 /* If we got back the original object, something is wrong. Perhaps
6944 we are evaluating an expression too early. In any event, don't
6945 infinitely recurse. */
6949 /* If TEM's type is a union of variable size, pass TARGET to the inner
6950 computation, since it will need a temporary and TARGET is known
6951 to suffice. This occurs in unchecked conversion in Ada. */
6955 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6956 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6958 ? target : NULL_RTX),
6960 (modifier == EXPAND_INITIALIZER
6961 || modifier == EXPAND_CONST_ADDRESS)
6962 ? modifier : EXPAND_NORMAL);
6964 /* If this is a constant, put it into a register if it is a
6965 legitimate constant and OFFSET is 0, and into memory if it isn't. */
6966 if (CONSTANT_P (op0))
6968 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6969 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6971 op0 = force_reg (mode, op0);
6973 op0 = validize_mem (force_const_mem (mode, op0));
6978 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6980 /* If this object is in a register, put it into memory.
6981 This case can't occur in C, but can in Ada if we have
6982 unchecked conversion of an expression from a scalar type to
6983 an array or record type. */
6984 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6985 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6987 /* If the operand is a SAVE_EXPR, we can deal with this by
6988 forcing the SAVE_EXPR into memory. */
6989 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
6991 put_var_into_stack (TREE_OPERAND (exp, 0));
6992 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
6997 = build_qualified_type (TREE_TYPE (tem),
6998 (TYPE_QUALS (TREE_TYPE (tem))
6999 | TYPE_QUAL_CONST));
7000 rtx memloc = assign_temp (nt, 1, 1, 1);
7002 mark_temp_addr_taken (memloc);
7003 emit_move_insn (memloc, op0);
7008 if (GET_CODE (op0) != MEM)
7011 if (GET_MODE (offset_rtx) != ptr_mode)
7013 #ifdef POINTERS_EXTEND_UNSIGNED
7014 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
7016 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7020 /* A constant address in OP0 can have VOIDmode; we must not try
7021 to call force_reg for that case, so avoid it. */
7022 if (GET_CODE (op0) == MEM
7023 && GET_MODE (op0) == BLKmode
7024 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7026 && (bitpos % bitsize) == 0
7027 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7028 && alignment == GET_MODE_ALIGNMENT (mode1))
7030 rtx temp = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7032 if (GET_CODE (XEXP (temp, 0)) == REG)
7035 op0 = (replace_equiv_address
7037 force_reg (GET_MODE (XEXP (temp, 0)),
7042 op0 = change_address (op0, VOIDmode,
7043 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
7044 force_reg (ptr_mode,
7048 /* Don't forget about volatility even if this is a bitfield. */
7049 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7051 if (op0 == orig_op0)
7052 op0 = copy_rtx (op0);
7054 MEM_VOLATILE_P (op0) = 1;
7057 /* Check the access. */
7058 if (cfun != 0 && current_function_check_memory_usage
7059 && GET_CODE (op0) == MEM)
7061 enum memory_use_mode memory_usage;
7062 memory_usage = get_memory_usage_from_modifier (modifier);
7064 if (memory_usage != MEMORY_USE_DONT)
7069 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
7070 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
7072 /* Check the access rights of the pointer. */
7073 in_check_memory_usage = 1;
7074 if (size > BITS_PER_UNIT)
7075 emit_library_call (chkr_check_addr_libfunc,
7076 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
7077 Pmode, GEN_INT (size / BITS_PER_UNIT),
7078 TYPE_MODE (sizetype),
7079 GEN_INT (memory_usage),
7080 TYPE_MODE (integer_type_node));
7081 in_check_memory_usage = 0;
7085 /* In cases where an aligned union has an unaligned object
7086 as a field, we might be extracting a BLKmode value from
7087 an integer-mode (e.g., SImode) object. Handle this case
7088 by doing the extract into an object as wide as the field
7089 (which we know to be the width of a basic mode), then
7090 storing into memory, and changing the mode to BLKmode. */
7091 if (mode1 == VOIDmode
7092 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7093 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7094 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7095 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7096 && modifier != EXPAND_CONST_ADDRESS
7097 && modifier != EXPAND_INITIALIZER)
7098 /* If the field isn't aligned enough to fetch as a memref,
7099 fetch it as a bit field. */
7100 || (mode1 != BLKmode
7101 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
7102 && ((TYPE_ALIGN (TREE_TYPE (tem))
7103 < GET_MODE_ALIGNMENT (mode))
7104 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7105 /* If the type and the field are a constant size and the
7106 size of the type isn't the same size as the bitfield,
7107 we must use bitfield operations. */
7109 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7111 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7114 && SLOW_UNALIGNED_ACCESS (mode, alignment)
7115 && (TYPE_ALIGN (type) > alignment
7116 || bitpos % TYPE_ALIGN (type) != 0)))
7118 enum machine_mode ext_mode = mode;
7120 if (ext_mode == BLKmode
7121 && ! (target != 0 && GET_CODE (op0) == MEM
7122 && GET_CODE (target) == MEM
7123 && bitpos % BITS_PER_UNIT == 0))
7124 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7126 if (ext_mode == BLKmode)
7128 /* In this case, BITPOS must start at a byte boundary and
7129 TARGET, if specified, must be a MEM. */
7130 if (GET_CODE (op0) != MEM
7131 || (target != 0 && GET_CODE (target) != MEM)
7132 || bitpos % BITS_PER_UNIT != 0)
7135 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7137 target = assign_temp (type, 0, 1, 1);
7139 emit_block_move (target, op0,
7140 bitsize == -1 ? expr_size (exp)
7141 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7148 op0 = validize_mem (op0);
7150 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7151 mark_reg_pointer (XEXP (op0, 0), alignment);
7153 op0 = extract_bit_field (op0, bitsize, bitpos,
7154 unsignedp, target, ext_mode, ext_mode,
7156 int_size_in_bytes (TREE_TYPE (tem)));
7158 /* If the result is a record type and BITSIZE is narrower than
7159 the mode of OP0, an integral mode, and this is a big endian
7160 machine, we must put the field into the high-order bits. */
7161 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7162 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7163 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7164 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7165 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7169 if (mode == BLKmode)
7171 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
7173 rtx new = assign_temp (nt, 0, 1, 1);
7175 emit_move_insn (new, op0);
7176 op0 = copy_rtx (new);
7177 PUT_MODE (op0, BLKmode);
7183 /* If the result is BLKmode, use that to access the object now as well. */
7185 if (mode == BLKmode)
7188 /* Get a reference to just this component. */
7189 if (modifier == EXPAND_CONST_ADDRESS
7190 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7191 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7193 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7195 if (op0 == orig_op0)
7196 op0 = copy_rtx (op0);
7198 set_mem_attributes (op0, exp, 0);
7199 if (GET_CODE (XEXP (op0, 0)) == REG)
7200 mark_reg_pointer (XEXP (op0, 0), alignment);
7202 MEM_VOLATILE_P (op0) |= volatilep;
7203 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7204 || modifier == EXPAND_CONST_ADDRESS
7205 || modifier == EXPAND_INITIALIZER)
7207 else if (target == 0)
7208 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7210 convert_move (target, op0, unsignedp);
7214 /* Intended for a reference to a buffer of a file-object in Pascal.
7215 But it's not certain that a special tree code will really be
7216 necessary for these. INDIRECT_REF might work for them. */
7222 /* Pascal set IN expression.
7225 rlo = set_low - (set_low%bits_per_word);
7226 the_word = set [ (index - rlo)/bits_per_word ];
7227 bit_index = index % bits_per_word;
7228 bitmask = 1 << bit_index;
7229 return !!(the_word & bitmask); */
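/* A minimal C sketch of the same membership test (illustrative;
   assumes a byte-addressed set representation, matching the
   BITS_PER_UNIT arithmetic used below):

       static int set_in_p (const unsigned char *set, int set_low, int index)
       {
         int rlo = set_low - (set_low % 8);
         unsigned char word = set[(index - rlo) / 8];
         return (word >> (index % 8)) & 1;
       }
 */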
7231 tree set = TREE_OPERAND (exp, 0);
7232 tree index = TREE_OPERAND (exp, 1);
7233 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7234 tree set_type = TREE_TYPE (set);
7235 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7236 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7237 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7238 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7239 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7240 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7241 rtx setaddr = XEXP (setval, 0);
7242 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7244 rtx diff, quo, rem, addr, bit, result;
7246 /* If domain is empty, answer is no. Likewise if index is constant
7247 and out of bounds. */
7248 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7249 && TREE_CODE (set_low_bound) == INTEGER_CST
7250 && tree_int_cst_lt (set_high_bound, set_low_bound))
7251 || (TREE_CODE (index) == INTEGER_CST
7252 && TREE_CODE (set_low_bound) == INTEGER_CST
7253 && tree_int_cst_lt (index, set_low_bound))
7254 || (TREE_CODE (set_high_bound) == INTEGER_CST
7255 && TREE_CODE (index) == INTEGER_CST
7256 && tree_int_cst_lt (set_high_bound, index))))
7260 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7262 /* If we get here, we have to generate the code for both cases
7263 (in range and out of range). */
7265 op0 = gen_label_rtx ();
7266 op1 = gen_label_rtx ();
7268 if (! (GET_CODE (index_val) == CONST_INT
7269 && GET_CODE (lo_r) == CONST_INT))
7271 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7272 GET_MODE (index_val), iunsignedp, 0, op1);
7275 if (! (GET_CODE (index_val) == CONST_INT
7276 && GET_CODE (hi_r) == CONST_INT))
7278 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7279 GET_MODE (index_val), iunsignedp, 0, op1);
7282 /* Calculate the element number of bit zero in the first word of the set. */
7284 if (GET_CODE (lo_r) == CONST_INT)
7285 rlow = GEN_INT (INTVAL (lo_r)
7286 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7288 rlow = expand_binop (index_mode, and_optab, lo_r,
7289 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7290 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7292 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7293 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7295 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7296 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7297 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7298 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7300 addr = memory_address (byte_mode,
7301 expand_binop (index_mode, add_optab, diff,
7302 setaddr, NULL_RTX, iunsignedp,
7305 /* Extract the bit we want to examine. */
7306 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7307 gen_rtx_MEM (byte_mode, addr),
7308 make_tree (TREE_TYPE (index), rem),
7310 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7311 GET_MODE (target) == byte_mode ? target : 0,
7312 1, OPTAB_LIB_WIDEN);
7314 if (result != target)
7315 convert_move (target, result, 1);
7317 /* Output the code to handle the out-of-range case. */
7320 emit_move_insn (target, const0_rtx);
7325 case WITH_CLEANUP_EXPR:
7326 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7328 WITH_CLEANUP_EXPR_RTL (exp)
7329 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7330 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 1));
7332 /* That's it for this cleanup. */
7333 TREE_OPERAND (exp, 1) = 0;
7335 return WITH_CLEANUP_EXPR_RTL (exp);
7337 case CLEANUP_POINT_EXPR:
7339 /* Start a new binding layer that will keep track of all cleanup
7340 actions to be performed. */
7341 expand_start_bindings (2);
7343 target_temp_slot_level = temp_slot_level;
7345 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
7346 /* If we're going to use this value, load it up now. */
7348 op0 = force_not_mem (op0);
7349 preserve_temp_slots (op0);
7350 expand_end_bindings (NULL_TREE, 0, 0);
7355 /* Check for a built-in function. */
7356 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7357 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7359 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7361 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7362 == BUILT_IN_FRONTEND)
7363 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7365 return expand_builtin (exp, target, subtarget, tmode, ignore);
7368 return expand_call (exp, target, ignore);
7370 case NON_LVALUE_EXPR:
7373 case REFERENCE_EXPR:
7374 if (TREE_OPERAND (exp, 0) == error_mark_node)
7377 if (TREE_CODE (type) == UNION_TYPE)
7379 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7381 /* If both input and output are BLKmode, this conversion
7382 isn't actually doing anything unless we need to make the
7383 alignment stricter. */
7384 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7385 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7386 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7387 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7391 target = assign_temp (type, 0, 1, 1);
7393 if (GET_CODE (target) == MEM)
7394 /* Store data into beginning of memory target. */
7395 store_expr (TREE_OPERAND (exp, 0),
7396 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7398 else if (GET_CODE (target) == REG)
7399 /* Store this field into a union of the proper type. */
7400 store_field (target,
7401 MIN ((int_size_in_bytes (TREE_TYPE
7402 (TREE_OPERAND (exp, 0)))
7404 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7405 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7406 VOIDmode, 0, BITS_PER_UNIT,
7407 int_size_in_bytes (type), 0);
7411 /* Return the entire union. */
7415 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7417 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7420 /* If the signedness of the conversion differs and OP0 is
7421 a promoted SUBREG, clear that indication since we now
7422 have to do the proper extension. */
7423 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7424 && GET_CODE (op0) == SUBREG)
7425 SUBREG_PROMOTED_VAR_P (op0) = 0;
7430 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
7431 if (GET_MODE (op0) == mode)
7434 /* If OP0 is a constant, just convert it into the proper mode. */
7435 if (CONSTANT_P (op0))
7437 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7438 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7440 if (modifier == EXPAND_INITIALIZER)
7441 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7445 convert_to_mode (mode, op0,
7446 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7448 convert_move (target, op0,
7449 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7453 /* We come here from MINUS_EXPR when the second operand is a constant. */
7456 this_optab = ! unsignedp && flag_trapv
7457 && (GET_MODE_CLASS(mode) == MODE_INT)
7458 ? addv_optab : add_optab;
7460 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7461 something else, make sure we add the register to the constant and
7462 then to the other thing. This case can occur during strength
7463 reduction and doing it this way will produce better code if the
7464 frame pointer or argument pointer is eliminated.
7466 fold-const.c will ensure that the constant is always in the inner
7467 PLUS_EXPR, so the only case we need to do anything about is if
7468 sp, ap, or fp is our second argument, in which case we must swap
7469 the innermost first argument and our second argument. */
7471 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7472 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7473 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7474 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7475 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7476 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7478 tree t = TREE_OPERAND (exp, 1);
7480 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7481 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7484 /* If the result is to be ptr_mode and we are adding an integer to
7485 something, we might be forming a constant. So try to use
7486 plus_constant. If it produces a sum and we can't accept it,
7487 use force_operand. This allows P = &ARR[const] to generate
7488 efficient code on machines where a SYMBOL_REF is not a valid address.
7491 If this is an EXPAND_SUM call, always return the sum. */
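/* Illustrative example: for P = &ARR[10] with 4-byte elements, the
   operands fold via plus_constant into

       (const (plus (symbol_ref ARR) (const_int 40)))

   so no add instruction is emitted; force_operand is only needed
   when the target cannot accept such a sum directly. */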
7492 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7493 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7495 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7496 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7497 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7501 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7503 /* Use immed_double_const to ensure that the constant is
7504 truncated according to the mode of OP1, then sign extended
7505 to a HOST_WIDE_INT. Using the constant directly can result
7506 in non-canonical RTL in a 64x32 cross compile. */
7508 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7510 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7511 op1 = plus_constant (op1, INTVAL (constant_part));
7512 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7513 op1 = force_operand (op1, target);
7517 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7518 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7519 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7523 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7525 if (! CONSTANT_P (op0))
7527 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7528 VOIDmode, modifier);
7529 /* Don't go to both_summands if modifier
7530 says it's not right to return a PLUS. */
7531 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7535 /* Use immed_double_const to ensure that the constant is
7536 truncated according to the mode of OP1, then sign extended
7537 to a HOST_WIDE_INT. Using the constant directly can result
7538 in non-canonical RTL in a 64x32 cross compile. */
7540 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7542 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7543 op0 = plus_constant (op0, INTVAL (constant_part));
7544 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7545 op0 = force_operand (op0, target);
7550 /* No sense saving up arithmetic to be done
7551 if it's all in the wrong mode to form part of an address.
7552 And force_operand won't know whether to sign-extend or zero-extend. */
7554 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7555 || mode != ptr_mode)
7558 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7561 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7562 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
7565 /* Make sure any term that's a sum with a constant comes last. */
7566 if (GET_CODE (op0) == PLUS
7567 && CONSTANT_P (XEXP (op0, 1)))
7573 /* If adding to a sum including a constant,
7574 associate it to put the constant outside. */
7575 if (GET_CODE (op1) == PLUS
7576 && CONSTANT_P (XEXP (op1, 1)))
7578 rtx constant_term = const0_rtx;
7580 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7583 /* Ensure that MULT comes first if there is one. */
7584 else if (GET_CODE (op0) == MULT)
7585 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7587 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7589 /* Let's also eliminate constants from op0 if possible. */
7590 op0 = eliminate_constant_term (op0, &constant_term);
7592 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7593 their sum should be a constant. Form it into OP1, since the
7594 result we want will then be OP0 + OP1. */
7596 temp = simplify_binary_operation (PLUS, mode, constant_term,
7601 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7604 /* Put a constant term last and put a multiplication first. */
7605 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7606 temp = op1, op1 = op0, op0 = temp;
7608 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7609 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7612 /* For initializers, we are allowed to return a MINUS of two
7613 symbolic constants. Here we handle all cases when both operands are constant. */
7615 /* Handle difference of two symbolic constants,
7616 for the sake of an initializer. */
7617 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7618 && really_constant_p (TREE_OPERAND (exp, 0))
7619 && really_constant_p (TREE_OPERAND (exp, 1)))
7621 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7622 VOIDmode, ro_modifier);
7623 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7624 VOIDmode, ro_modifier);
7626 /* If the last operand is a CONST_INT, use plus_constant of
7627 the negated constant. Else make the MINUS. */
7628 if (GET_CODE (op1) == CONST_INT)
7629 return plus_constant (op0, - INTVAL (op1));
7631 return gen_rtx_MINUS (mode, op0, op1);
7633 /* Convert A - const to A + (-const). */
7634 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7636 tree negated = fold (build1 (NEGATE_EXPR, type,
7637 TREE_OPERAND (exp, 1)));
7639 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7640 /* If we can't negate the constant in TYPE, leave it alone and
7641 expand_binop will negate it for us. We used to try to do it
7642 here in the signed version of TYPE, but that doesn't work
7643 on POINTER_TYPEs. */;
7646 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7650 this_optab = ! unsignedp && flag_trapv
7651 && (GET_MODE_CLASS(mode) == MODE_INT)
7652 ? subv_optab : sub_optab;
7656 /* If first operand is constant, swap them.
7657 Thus the following special case checks need only
7658 check the second operand. */
7659 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7661 tree t1 = TREE_OPERAND (exp, 0);
7662 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7663 TREE_OPERAND (exp, 1) = t1;
7666 /* Attempt to return something suitable for generating an
7667 indexed address, for machines that support that. */
7669 if (modifier == EXPAND_SUM && mode == ptr_mode
7670 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7671 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7673 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7676 /* Apply distributive law if OP0 is x+c. */
7677 if (GET_CODE (op0) == PLUS
7678 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7683 (mode, XEXP (op0, 0),
7684 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7685 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7686 * INTVAL (XEXP (op0, 1))));
7688 if (GET_CODE (op0) != REG)
7689 op0 = force_operand (op0, NULL_RTX);
7690 if (GET_CODE (op0) != REG)
7691 op0 = copy_to_mode_reg (mode, op0);
7694 gen_rtx_MULT (mode, op0,
7695 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7698 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7701 /* Check for multiplying things that have been extended
7702 from a narrower type. If this machine supports multiplying
7703 in that narrower type with a result in the desired type,
7704 do it that way, and avoid the explicit type-conversion. */
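/* Illustrative example: for

       short a, b;
       int c = (int) a * (int) b;

   both multiplicands are NOP_EXPRs widening from HImode, so if the
   target defines a mulhisi-style widening-multiply pattern we emit
   one such insn instead of two extensions plus an SImode multiply. */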
7705 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7706 && TREE_CODE (type) == INTEGER_TYPE
7707 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7708 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7709 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7710 && int_fits_type_p (TREE_OPERAND (exp, 1),
7711 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7712 /* Don't use a widening multiply if a shift will do. */
7713 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7714 > HOST_BITS_PER_WIDE_INT)
7715 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7717 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7718 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7720 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7721 /* If both operands are extended, they must either both
7722 be zero-extended or both be sign-extended. */
7723 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7725 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7727 enum machine_mode innermode
7728 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7729 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7730 ? smul_widen_optab : umul_widen_optab);
7731 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7732 ? umul_widen_optab : smul_widen_optab);
7733 if (mode == GET_MODE_WIDER_MODE (innermode))
7735 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7737 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7738 NULL_RTX, VOIDmode, 0);
7739 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7740 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7743 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7744 NULL_RTX, VOIDmode, 0);
7747 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7748 && innermode == word_mode)
7751 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7752 NULL_RTX, VOIDmode, 0);
7753 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7754 op1 = convert_modes (innermode, mode,
7755 expand_expr (TREE_OPERAND (exp, 1),
7756 NULL_RTX, VOIDmode, 0),
7759 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7760 NULL_RTX, VOIDmode, 0);
7761 temp = expand_binop (mode, other_optab, op0, op1, target,
7762 unsignedp, OPTAB_LIB_WIDEN);
7763 htem = expand_mult_highpart_adjust (innermode,
7764 gen_highpart (innermode, temp),
7766 gen_highpart (innermode, temp),
7768 emit_move_insn (gen_highpart (innermode, temp), htem);
7773 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7774 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7775 return expand_mult (mode, op0, op1, target, unsignedp);
7777 case TRUNC_DIV_EXPR:
7778 case FLOOR_DIV_EXPR:
7780 case ROUND_DIV_EXPR:
7781 case EXACT_DIV_EXPR:
7782 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7784 /* Possible optimization: compute the dividend with EXPAND_SUM
7785 then, if the divisor is constant, we can optimize the case
7786 where some terms of the dividend have coefficients divisible by it. */
7787 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7788 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7789 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7792 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal, saving
7793 an expensive divide. If not, combine will rebuild the original computation. */
7795 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7796 && !real_onep (TREE_OPERAND (exp, 0)))
7797 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7798 build (RDIV_EXPR, type,
7799 build_real (type, dconst1),
7800 TREE_OPERAND (exp, 1))),
7801 target, tmode, unsignedp);
7802 this_optab = sdiv_optab;
7805 case TRUNC_MOD_EXPR:
7806 case FLOOR_MOD_EXPR:
7808 case ROUND_MOD_EXPR:
7809 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7811 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7812 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7813 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7815 case FIX_ROUND_EXPR:
7816 case FIX_FLOOR_EXPR:
7818 abort (); /* Not used for C. */
7820 case FIX_TRUNC_EXPR:
7821 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7823 target = gen_reg_rtx (mode);
7824 expand_fix (target, op0, unsignedp);
7828 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7830 target = gen_reg_rtx (mode);
7831 /* expand_float can't figure out what to do if FROM has VOIDmode.
7832 So give it the correct mode. With -O, cse will optimize this. */
7833 if (GET_MODE (op0) == VOIDmode)
7834 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7836 expand_float (target, op0,
7837 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7841 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7842 temp = expand_unop (mode,
7843 ! unsignedp && flag_trapv
7844 && (GET_MODE_CLASS(mode) == MODE_INT)
7845 ? negv_optab : neg_optab, op0, target, 0);
7851 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7853 /* Handle complex values specially. */
7854 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7855 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7856 return expand_complex_abs (mode, op0, target, unsignedp);
7858 /* Unsigned abs is simply the operand. Testing here means we don't
7859 risk generating incorrect code below. */
7860 if (TREE_UNSIGNED (type))
7863 return expand_abs (mode, op0, target, unsignedp,
7864 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7868 target = original_target;
7869 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7870 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7871 || GET_MODE (target) != mode
7872 || (GET_CODE (target) == REG
7873 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7874 target = gen_reg_rtx (mode);
7875 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7876 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7878 /* First try to do it with a special MIN or MAX instruction.
7879 If that does not win, use a conditional jump to select the proper value. */
7881 this_optab = (TREE_UNSIGNED (type)
7882 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7883 : (code == MIN_EXPR ? smin_optab : smax_optab));
7885 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7890 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7893 if (GET_CODE (target) == MEM)
7894 target = gen_reg_rtx (mode);
7897 emit_move_insn (target, op0);
7899 op0 = gen_label_rtx ();
7901 /* If this mode is an integer too wide to compare properly,
7902 compare word by word. Rely on cse to optimize constant cases. */
7903 if (GET_MODE_CLASS (mode) == MODE_INT
7904 && ! can_compare_p (GE, mode, ccp_jump))
7906 if (code == MAX_EXPR)
7907 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7908 target, op1, NULL_RTX, op0);
7910 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7911 op1, target, NULL_RTX, op0);
7915 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7916 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7917 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7920 emit_move_insn (target, op1);
7925 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7926 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7932 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7933 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7938 /* ??? Can optimize bitwise operations with one arg constant.
7939 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7940 and (a bitwise1 b) bitwise2 b (etc)
7941 but that is probably not worthwhile. */
7943 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7944 boolean values when we want in all cases to compute both of them. In
7945 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7946 as actual zero-or-1 values and then bitwise anding. In cases where
7947 there cannot be any side effects, better code would be made by
7948 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7949 how to recognize those cases. */
7951 case TRUTH_AND_EXPR:
7953 this_optab = and_optab;
7958 this_optab = ior_optab;
7961 case TRUTH_XOR_EXPR:
7963 this_optab = xor_optab;
7970 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7972 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7973 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7976 /* Could determine the answer when only additive constants differ. Also,
7977 the addition of one can be handled by changing the condition. */
7984 case UNORDERED_EXPR:
7991 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7995 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7996 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7998 && GET_CODE (original_target) == REG
7999 && (GET_MODE (original_target)
8000 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8002 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8005 if (temp != original_target)
8006 temp = copy_to_reg (temp);
8008 op1 = gen_label_rtx ();
8009 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8010 GET_MODE (temp), unsignedp, 0, op1);
8011 emit_move_insn (temp, const1_rtx);
8016 /* If no set-flag instruction, must generate a conditional
8017 store into a temporary variable. Drop through
8018 and handle this like && and ||. */
8020 case TRUTH_ANDIF_EXPR:
8021 case TRUTH_ORIF_EXPR:
8023 && (target == 0 || ! safe_from_p (target, exp, 1)
8024 /* Make sure we don't have a hard reg (such as function's return
8025 value) live across basic blocks, if not optimizing. */
8026 || (!optimize && GET_CODE (target) == REG
8027 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8028 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8031 emit_clr_insn (target);
8033 op1 = gen_label_rtx ();
8034 jumpifnot (exp, op1);
8037 emit_0_to_1_insn (target);
8040 return ignore ? const0_rtx : target;
8042 case TRUTH_NOT_EXPR:
8043 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8044 /* The parser is careful to generate TRUTH_NOT_EXPR
8045 only with operands that are always zero or one. */
8046 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8047 target, 1, OPTAB_LIB_WIDEN);
8053 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8055 return expand_expr (TREE_OPERAND (exp, 1),
8056 (ignore ? const0_rtx : target),
8060 /* If we would have a "singleton" (see below) were it not for a
8061 conversion in each arm, bring that conversion back out. */
8062 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8063 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8064 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8065 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8067 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8068 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8070 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8071 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8072 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8073 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8074 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8075 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8076 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8077 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8078 return expand_expr (build1 (NOP_EXPR, type,
8079 build (COND_EXPR, TREE_TYPE (iftrue),
8080 TREE_OPERAND (exp, 0),
8082 target, tmode, modifier);
8086 /* Note that COND_EXPRs whose type is a structure or union
8087 are required to be constructed to contain assignments of
8088 a temporary variable, so that we can evaluate them here
8089 for side effect only. If type is void, we must do likewise. */
8091 /* If an arm of the branch requires a cleanup,
8092 only that cleanup is performed. */
8095 tree binary_op = 0, unary_op = 0;
8097 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8098 convert it to our mode, if necessary. */
8099 if (integer_onep (TREE_OPERAND (exp, 1))
8100 && integer_zerop (TREE_OPERAND (exp, 2))
8101 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8105 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8110 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
8111 if (GET_MODE (op0) == mode)
8115 target = gen_reg_rtx (mode);
8116 convert_move (target, op0, unsignedp);
8120 /* Check for X ? A + B : A. If we have this, we can copy A to the
8121 output and conditionally add B. Similarly for unary operations.
8122 Don't do this if X has side-effects because those side effects
8123 might affect A or B and the "?" operation is a sequence point in
8124 ANSI. (operand_equal_p tests for side effects.) */
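	 /* A sketch of the transformation below (variable names are
	    illustrative, not from the source): for C input

		r = x ? a + b : a;

	    we pick SINGLETON = a and BINARY_OP = a + b, copy A into the
	    target, and emit roughly

		r = a;  if (x) r = r + b;

	    so only the addition of B is conditional.  */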
8126 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8127 && operand_equal_p (TREE_OPERAND (exp, 2),
8128 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8129 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8130 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8131 && operand_equal_p (TREE_OPERAND (exp, 1),
8132 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8133 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8134 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8135 && operand_equal_p (TREE_OPERAND (exp, 2),
8136 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8137 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8138 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8139 && operand_equal_p (TREE_OPERAND (exp, 1),
8140 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8141 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8143 /* If we are not to produce a result, we have no target. Otherwise,
8144 if a target was specified use it; it will not be used as an
8145 intermediate target unless it is safe. If no target, use a temporary. */
8150 else if (original_target
8151 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8152 || (singleton && GET_CODE (original_target) == REG
8153 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8154 && original_target == var_rtx (singleton)))
8155 && GET_MODE (original_target) == mode
8156 #ifdef HAVE_conditional_move
8157 && (! can_conditionally_move_p (mode)
8158 || GET_CODE (original_target) == REG
8159 || TREE_ADDRESSABLE (type))
8161 && (GET_CODE (original_target) != MEM
8162 || TREE_ADDRESSABLE (type)))
8163 temp = original_target;
8164 else if (TREE_ADDRESSABLE (type))
8167 temp = assign_temp (type, 0, 0, 1);
8169 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8170 do the test of X as a store-flag operation, do this as
8171 A + ((X != 0) << log C). Similarly for other simple binary
8172 operators. Only do for C == 1 if BRANCH_COST is low. */
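	  /* Illustrative example (hypothetical source, assuming
	     do_store_flag succeeds): for

		 r = x ? a + 8 : a;

	     C is 8 == 1 << 3, so the branch-free expansion below is

		 r = a + ((x != 0) << 3);  */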
8173 if (temp && singleton && binary_op
8174 && (TREE_CODE (binary_op) == PLUS_EXPR
8175 || TREE_CODE (binary_op) == MINUS_EXPR
8176 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8177 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8178 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8179 : integer_onep (TREE_OPERAND (binary_op, 1)))
8180 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8183 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8184 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8185 ? addv_optab : add_optab)
8186 : TREE_CODE (binary_op) == MINUS_EXPR
8187 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8188 ? subv_optab : sub_optab)
8189 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8192 /* If we had X ? A : A + 1, do this as A + (X == 0).
8194 We have to invert the truth value here and then put it
8195 back later if do_store_flag fails. We cannot simply copy
8196 TREE_OPERAND (exp, 0) to another variable and modify that
8197 because invert_truthvalue can modify the tree pointed to by its argument. */
8199 if (singleton == TREE_OPERAND (exp, 1))
8200 TREE_OPERAND (exp, 0)
8201 = invert_truthvalue (TREE_OPERAND (exp, 0));
8203 result = do_store_flag (TREE_OPERAND (exp, 0),
8204 (safe_from_p (temp, singleton, 1)
8206 mode, BRANCH_COST <= 1);
8208 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8209 result = expand_shift (LSHIFT_EXPR, mode, result,
8210 build_int_2 (tree_log2
8214 (safe_from_p (temp, singleton, 1)
8215 ? temp : NULL_RTX), 0);
8219 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8220 return expand_binop (mode, boptab, op1, result, temp,
8221 unsignedp, OPTAB_LIB_WIDEN);
8223 else if (singleton == TREE_OPERAND (exp, 1))
8224 TREE_OPERAND (exp, 0)
8225 = invert_truthvalue (TREE_OPERAND (exp, 0));
8228 do_pending_stack_adjust ();
8230 op0 = gen_label_rtx ();
8232 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8236 /* If the target conflicts with the other operand of the
8237 binary op, we can't use it. Also, we can't use the target
8238 if it is a hard register, because evaluating the condition
8239 might clobber it. */
8241 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8242 || (GET_CODE (temp) == REG
8243 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8244 temp = gen_reg_rtx (mode);
8245 store_expr (singleton, temp, 0);
8248 expand_expr (singleton,
8249 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8250 if (singleton == TREE_OPERAND (exp, 1))
8251 jumpif (TREE_OPERAND (exp, 0), op0);
8253 jumpifnot (TREE_OPERAND (exp, 0), op0);
8255 start_cleanup_deferral ();
8256 if (binary_op && temp == 0)
8257 /* Just touch the other operand. */
8258 expand_expr (TREE_OPERAND (binary_op, 1),
8259 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8261 store_expr (build (TREE_CODE (binary_op), type,
8262 make_tree (type, temp),
8263 TREE_OPERAND (binary_op, 1)),
8266 store_expr (build1 (TREE_CODE (unary_op), type,
8267 make_tree (type, temp)),
8271 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8272 comparison operator. If we have one of these cases, set the
8273 output to A, branch on A (cse will merge these two references),
8274 then set the output to FOO. */
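	  /* For example (illustrative only): for

		 r = x != 0 ? x : y;

	     we store X into the output, branch on the same X (cse will
	     merge the two references), and store Y on the fall-through
	     path.  */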
8276 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8277 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8278 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8279 TREE_OPERAND (exp, 1), 0)
8280 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8281 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8282 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8284 if (GET_CODE (temp) == REG
8285 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8286 temp = gen_reg_rtx (mode);
8287 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8288 jumpif (TREE_OPERAND (exp, 0), op0);
8290 start_cleanup_deferral ();
8291 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8295 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8296 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8297 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8298 TREE_OPERAND (exp, 2), 0)
8299 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8300 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8301 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8303 if (GET_CODE (temp) == REG
8304 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8305 temp = gen_reg_rtx (mode);
8306 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8307 jumpifnot (TREE_OPERAND (exp, 0), op0);
8309 start_cleanup_deferral ();
8310 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8315 op1 = gen_label_rtx ();
8316 jumpifnot (TREE_OPERAND (exp, 0), op0);
8318 start_cleanup_deferral ();
8320 /* One branch of the cond can be void, if it never returns. For
8321 example A ? throw : E */
8323 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8324 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8326 expand_expr (TREE_OPERAND (exp, 1),
8327 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8328 end_cleanup_deferral ();
8330 emit_jump_insn (gen_jump (op1));
8333 start_cleanup_deferral ();
8335 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8336 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8338 expand_expr (TREE_OPERAND (exp, 2),
8339 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8342 end_cleanup_deferral ();
8353 /* Something needs to be initialized, but we didn't know
8354 where that thing was when building the tree. For example,
8355 it could be the return value of a function, or a parameter
8356 to a function which is laid down on the stack, or a temporary
8357 variable which must be passed by reference.
8359 We guarantee that the expression will either be constructed
8360 or copied into our original target. */
8362 tree slot = TREE_OPERAND (exp, 0);
8363 tree cleanups = NULL_TREE;
8366 if (TREE_CODE (slot) != VAR_DECL)
8370 target = original_target;
8372 /* Set this here so that if we get a target that refers to a
8373 register variable that's already been used, put_reg_into_stack
8374 knows that it should fix up those uses. */
8375 TREE_USED (slot) = 1;
8379 if (DECL_RTL_SET_P (slot))
8381 target = DECL_RTL (slot);
8382 /* We have already expanded the slot, so don't do anything else. */
8384 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8389 target = assign_temp (type, 2, 0, 1);
8390 /* All temp slots at this level must not conflict. */
8391 preserve_temp_slots (target);
8392 SET_DECL_RTL (slot, target);
8393 if (TREE_ADDRESSABLE (slot))
8394 put_var_into_stack (slot);
8396 /* Since SLOT is not known to the called function
8397 to belong to its stack frame, we must build an explicit
8398 cleanup. This case occurs when we must build up a reference
8399 to pass the reference as an argument. In this case,
8400 it is very likely that such a reference need not be built here. */
8403 if (TREE_OPERAND (exp, 2) == 0)
8404 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
8405 cleanups = TREE_OPERAND (exp, 2);
8410 /* This case does occur, when expanding a parameter which
8411 needs to be constructed on the stack. The target
8412 is the actual stack address that we want to initialize.
8413 The function we call will perform the cleanup in this case. */
8415 /* If we have already assigned it space, use that space,
8416 not the target that we were passed, as our target
8417 parameter is only a hint. */
8418 if (DECL_RTL_SET_P (slot))
8420 target = DECL_RTL (slot);
8421 /* We have already expanded the slot, so don't do anything else. */
8423 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8428 SET_DECL_RTL (slot, target);
8429 /* If we must have an addressable slot, then make sure that
8430 the RTL that we just stored in slot is OK. */
8431 if (TREE_ADDRESSABLE (slot))
8432 put_var_into_stack (slot);
8436 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8437 /* Mark it as expanded. */
8438 TREE_OPERAND (exp, 1) = NULL_TREE;
8440 store_expr (exp1, target, 0);
8442 expand_decl_cleanup (NULL_TREE, cleanups);
8449 tree lhs = TREE_OPERAND (exp, 0);
8450 tree rhs = TREE_OPERAND (exp, 1);
8452 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8458 /* If lhs is complex, expand calls in rhs before computing it.
8459 That's so we don't compute a pointer and save it over a
8460 call. If lhs is simple, compute it first so we can give it
8461 as a target if the rhs is just a call. This avoids an
8462 extra temp and copy and that prevents a partial-subsumption
8463 which makes bad code. Actually we could treat
8464 component_ref's of vars like vars. */
8466 tree lhs = TREE_OPERAND (exp, 0);
8467 tree rhs = TREE_OPERAND (exp, 1);
8471 /* Check for |= or &= of a bitfield of size one into another bitfield
8472 of size 1. In this case, (unless we need the result of the
8473 assignment) we can do this more efficiently with a
8474 test followed by an assignment, if necessary.
8476 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8477 things change so we do, this code should be enhanced to support it. */
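	 /* As an illustration (hypothetical declarations): given

		struct s { unsigned a : 1, b : 1; } v;

	    the statement "v.a |= v.b;" is compiled as a test of v.b that
	    jumps around the single store "v.a = 1" when v.b is zero,
	    instead of a read-modify-write of v.a.  */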
8480 && TREE_CODE (lhs) == COMPONENT_REF
8481 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8482 || TREE_CODE (rhs) == BIT_AND_EXPR)
8483 && TREE_OPERAND (rhs, 0) == lhs
8484 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8485 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8486 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8488 rtx label = gen_label_rtx ();
8490 do_jump (TREE_OPERAND (rhs, 1),
8491 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8492 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8493 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8494 (TREE_CODE (rhs) == BIT_IOR_EXPR
8496 : integer_zero_node)),
8498 do_pending_stack_adjust ();
8503 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8509 if (!TREE_OPERAND (exp, 0))
8510 expand_null_return ();
8512 expand_return (TREE_OPERAND (exp, 0));
8515 case PREINCREMENT_EXPR:
8516 case PREDECREMENT_EXPR:
8517 return expand_increment (exp, 0, ignore);
8519 case POSTINCREMENT_EXPR:
8520 case POSTDECREMENT_EXPR:
8521 /* Faster to treat as pre-increment if result is not used. */
8522 return expand_increment (exp, ! ignore, ignore);
8525 /* If nonzero, TEMP will be set to the address of something that might
8526 be a MEM corresponding to a stack slot. */
8529 /* Are we taking the address of a nested function? */
8530 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8531 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8532 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8533 && ! TREE_STATIC (exp))
8535 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8536 op0 = force_operand (op0, target);
8538 /* If we are taking the address of something erroneous, just use zero. */
8540 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8544 /* We make sure to pass const0_rtx down if we came in with
8545 ignore set, to avoid doing the cleanups twice for something. */
8546 op0 = expand_expr (TREE_OPERAND (exp, 0),
8547 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8548 (modifier == EXPAND_INITIALIZER
8549 ? modifier : EXPAND_CONST_ADDRESS));
8551 /* If we are going to ignore the result, OP0 will have been set
8552 to const0_rtx, so just return it. Don't get confused and
8553 think we are taking the address of the constant. */
8557 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8558 clever and returns a REG when given a MEM. */
8559 op0 = protect_from_queue (op0, 1);
8561 /* We would like the object in memory. If it is a constant, we can
8562 have it be statically allocated into memory. For a non-constant,
8563 we need to allocate some memory and store the value into it. */
8565 if (CONSTANT_P (op0))
8566 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8568 else if (GET_CODE (op0) == MEM)
8570 mark_temp_addr_taken (op0);
8571 temp = XEXP (op0, 0);
8574 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8575 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8576 || GET_CODE (op0) == PARALLEL)
8578 /* If this object is in a register, we must copy it into memory in order to take its address. */
8580 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8581 tree nt = build_qualified_type (inner_type,
8582 (TYPE_QUALS (inner_type)
8583 | TYPE_QUAL_CONST));
8584 rtx memloc = assign_temp (nt, 1, 1, 1);
8586 mark_temp_addr_taken (memloc);
8587 if (GET_CODE (op0) == PARALLEL)
8588 /* Handle calls that pass values in multiple non-contiguous
8589 locations. The Irix 6 ABI has examples of this. */
8590 emit_group_store (memloc, op0,
8591 int_size_in_bytes (inner_type),
8592 TYPE_ALIGN (inner_type));
8594 emit_move_insn (memloc, op0);
8598 if (GET_CODE (op0) != MEM)
8601 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8603 temp = XEXP (op0, 0);
8604 #ifdef POINTERS_EXTEND_UNSIGNED
8605 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8606 && mode == ptr_mode)
8607 temp = convert_memory_address (ptr_mode, temp);
8612 op0 = force_operand (XEXP (op0, 0), target);
8615 if (flag_force_addr && GET_CODE (op0) != REG)
8616 op0 = force_reg (Pmode, op0);
8618 if (GET_CODE (op0) == REG
8619 && ! REG_USERVAR_P (op0))
8620 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8622 /* If we might have had a temp slot, add an equivalent address for it. */
8625 update_temp_slot_address (temp, op0);
8627 #ifdef POINTERS_EXTEND_UNSIGNED
8628 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8629 && mode == ptr_mode)
8630 op0 = convert_memory_address (ptr_mode, op0);
8635 case ENTRY_VALUE_EXPR:
8638 /* COMPLEX type for Extended Pascal & Fortran */
8641 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8644 /* Get the rtx code of the operands. */
8645 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8646 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8649 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8653 /* Move the real (op0) and imaginary (op1) parts to their location. */
8654 emit_move_insn (gen_realpart (mode, target), op0);
8655 emit_move_insn (gen_imagpart (mode, target), op1);
8657 insns = get_insns ();
8660 /* Complex construction should appear as a single unit. */
8661 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8662 each with a separate pseudo as destination.
8663 It's not correct for flow to treat them as a unit. */
8664 if (GET_CODE (target) != CONCAT)
8665 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8673 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8674 return gen_realpart (mode, op0);
8677 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8678 return gen_imagpart (mode, op0);
8682 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8686 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8689 target = gen_reg_rtx (mode);
8693 /* Store the realpart and the negated imagpart to target. */
8694 emit_move_insn (gen_realpart (partmode, target),
8695 gen_realpart (partmode, op0));
8697 imag_t = gen_imagpart (partmode, target);
8698 temp = expand_unop (partmode,
8699 ! unsignedp && flag_trapv
8700 && (GET_MODE_CLASS(partmode) == MODE_INT)
8701 ? negv_optab : neg_optab,
8702 gen_imagpart (partmode, op0), imag_t, 0);
8704 emit_move_insn (imag_t, temp);
8706 insns = get_insns ();
8709 /* Conjugate should appear as a single unit
8710 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8711 each with a separate pseudo as destination.
8712 It's not correct for flow to treat them as a unit. */
8713 if (GET_CODE (target) != CONCAT)
8714 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8721 case TRY_CATCH_EXPR:
8723 tree handler = TREE_OPERAND (exp, 1);
8725 expand_eh_region_start ();
8727 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8729 expand_eh_region_end_cleanup (handler);
8734 case TRY_FINALLY_EXPR:
8736 tree try_block = TREE_OPERAND (exp, 0);
8737 tree finally_block = TREE_OPERAND (exp, 1);
8738 rtx finally_label = gen_label_rtx ();
8739 rtx done_label = gen_label_rtx ();
8740 rtx return_link = gen_reg_rtx (Pmode);
8741 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8742 (tree) finally_label, (tree) return_link);
8743 TREE_SIDE_EFFECTS (cleanup) = 1;
8745 /* Start a new binding layer that will keep track of all cleanup
8746 actions to be performed. */
8747 expand_start_bindings (2);
8749 target_temp_slot_level = temp_slot_level;
8751 expand_decl_cleanup (NULL_TREE, cleanup);
8752 op0 = expand_expr (try_block, target, tmode, modifier);
8754 preserve_temp_slots (op0);
8755 expand_end_bindings (NULL_TREE, 0, 0);
8756 emit_jump (done_label);
8757 emit_label (finally_label);
8758 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8759 emit_indirect_jump (return_link);
8760 emit_label (done_label);
8764 case GOTO_SUBROUTINE_EXPR:
8766 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8767 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8768 rtx return_address = gen_label_rtx ();
8769 emit_move_insn (return_link,
8770 gen_rtx_LABEL_REF (Pmode, return_address));
8772 emit_label (return_address);
8777 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8780 return get_exception_pointer (cfun);
8783 /* Function descriptors are not valid except as
8784 initialization constants, and should not be expanded. */
8788 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8791 /* Here to do an ordinary binary operator, generating an instruction
8792 from the optab already placed in `this_optab'. */
8794 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8796 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8797 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8799 temp = expand_binop (mode, this_optab, op0, op1, target,
8800 unsignedp, OPTAB_LIB_WIDEN);
8806 /* Similar to expand_expr, except that we don't specify a target, target
8807 mode, or modifier and we return the alignment of the inner type. This is
8808 used in cases where it is not necessary to align the result to the
8809 alignment of its type as long as we know the alignment of the result, for
8810 example for comparisons of BLKmode values. */
8813 expand_expr_unaligned (exp, palign)
8815 unsigned int *palign;
8818 tree type = TREE_TYPE (exp);
8819 enum machine_mode mode = TYPE_MODE (type);
8821 /* Default the alignment we return to that of the type. */
8822 *palign = TYPE_ALIGN (type);
8824 /* The only case in which we do anything special is if the resulting mode is BLKmode. */
8826 if (mode != BLKmode)
8827 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8829 switch (TREE_CODE (exp))
8833 case NON_LVALUE_EXPR:
8834 /* Conversions between BLKmode values don't change the underlying
8835 alignment or value. */
8836 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8837 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8841 /* Much of the code for this case is copied directly from expand_expr.
8842 We need to duplicate it here because we will do something different
8843 in the fall-through case, so we need to handle the same exceptions it does. */
8846 tree array = TREE_OPERAND (exp, 0);
8847 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8848 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8849 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8852 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8855 /* Optimize the special-case of a zero lower bound.
8857 We convert the low_bound to sizetype to avoid some problems
8858 with constant folding. (E.g. suppose the lower bound is 1,
8859 and its mode is QI. Without the conversion, (ARRAY
8860 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8861 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8863 if (! integer_zerop (low_bound))
8864 index = size_diffop (index, convert (sizetype, low_bound));
8866 /* If this is a constant index into a constant array,
8867 just get the value from the array. Handle both the cases when
8868 we have an explicit constructor and when our operand is a variable
8869 that was declared const. */
8871 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8872 && host_integerp (index, 0)
8873 && 0 > compare_tree_int (index,
8874 list_length (CONSTRUCTOR_ELTS
8875 (TREE_OPERAND (exp, 0)))))
8879 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8880 i = tree_low_cst (index, 0);
8881 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8885 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8888 else if (optimize >= 1
8889 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8890 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8891 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8893 if (TREE_CODE (index) == INTEGER_CST)
8895 tree init = DECL_INITIAL (array);
8897 if (TREE_CODE (init) == CONSTRUCTOR)
8901 for (elem = CONSTRUCTOR_ELTS (init);
8902 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8903 elem = TREE_CHAIN (elem))
8907 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8917 case ARRAY_RANGE_REF:
8918 /* If the operand is a CONSTRUCTOR, we can just extract the
8919 appropriate field if it is present. Don't do this if we have
8920 already written the data since we want to refer to that copy
8921 and varasm.c assumes that's what we'll do. */
8922 if (TREE_CODE (exp) == COMPONENT_REF
8923 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8924 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8928 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8929 elt = TREE_CHAIN (elt))
8930 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8931 /* Note that unlike the case in expand_expr, we know this is
8932 BLKmode and hence not an integer. */
8933 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8937 enum machine_mode mode1;
8938 HOST_WIDE_INT bitsize, bitpos;
8941 unsigned int alignment;
8943 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8944 &mode1, &unsignedp, &volatilep,
8947 /* If we got back the original object, something is wrong. Perhaps
8948 we are evaluating an expression too early. In any event, don't
8949 infinitely recurse. */
8953 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8955 /* If this is a constant, put it into a register if it is a
8956 legitimate constant and OFFSET is 0, and into memory if it isn't. */
8957 if (CONSTANT_P (op0))
8959 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8961 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8963 op0 = force_reg (inner_mode, op0);
8965 op0 = validize_mem (force_const_mem (inner_mode, op0));
8970 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8972 /* If this object is in a register, put it into memory.
8973 This case can't occur in C, but can in Ada if we have
8974 unchecked conversion of an expression from a scalar type to
8975 an array or record type. */
8976 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8977 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8979 tree nt = build_qualified_type (TREE_TYPE (tem),
8980 (TYPE_QUALS (TREE_TYPE (tem))
8981 | TYPE_QUAL_CONST));
8982 rtx memloc = assign_temp (nt, 1, 1, 1);
8984 mark_temp_addr_taken (memloc);
8985 emit_move_insn (memloc, op0);
8989 if (GET_CODE (op0) != MEM)
8992 if (GET_MODE (offset_rtx) != ptr_mode)
8994 #ifdef POINTERS_EXTEND_UNSIGNED
8995 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8997 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
9001 op0 = change_address (op0, VOIDmode,
9002 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
9003 force_reg (ptr_mode,
9007 /* Don't forget about volatility even if this is a bitfield. */
9008 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
9010 op0 = copy_rtx (op0);
9011 MEM_VOLATILE_P (op0) = 1;
9014 /* Check the access. */
9015 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
9020 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
9021 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
9023 /* Check the access right of the pointer. */
9024 in_check_memory_usage = 1;
9025 if (size > BITS_PER_UNIT)
9026 emit_library_call (chkr_check_addr_libfunc,
9027 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
9028 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
9029 TYPE_MODE (sizetype),
9030 GEN_INT (MEMORY_USE_RO),
9031 TYPE_MODE (integer_type_node));
9032 in_check_memory_usage = 0;
9035 /* In cases where an aligned union has an unaligned object
9036 as a field, we might be extracting a BLKmode value from
9037 an integer-mode (e.g., SImode) object. Handle this case
9038 by doing the extract into an object as wide as the field
9039 (which we know to be the width of a basic mode), then
9040 storing into memory, and changing the mode to BLKmode.
9041 If we ultimately want the address (EXPAND_CONST_ADDRESS or
9042 EXPAND_INITIALIZER), then we must not copy to a temporary. */
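	  /* Concretely (illustrative): a word-sized BLKmode field carved
	     out of an SImode object is extracted in SImode with
	     extract_bit_field below, moved into a stack temporary, and
	     the temporary is then re-labelled as BLKmode.  */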
9043 if (mode1 == VOIDmode
9044 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
9045 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
9046 && (TYPE_ALIGN (type) > alignment
9047 || bitpos % TYPE_ALIGN (type) != 0)))
9049 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9051 if (ext_mode == BLKmode)
9053 /* In this case, BITPOS must start at a byte boundary. */
9054 if (GET_CODE (op0) != MEM
9055 || bitpos % BITS_PER_UNIT != 0)
9058 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
9062 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
9064 rtx new = assign_temp (nt, 0, 1, 1);
9066 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
9067 unsignedp, NULL_RTX, ext_mode,
9068 ext_mode, alignment,
9069 int_size_in_bytes (TREE_TYPE (tem)));
9071 /* If the result is a record type and BITSIZE is narrower than
9072 the mode of OP0, an integral mode, and this is a big endian
9073 machine, we must put the field into the high-order bits. */
9074 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9075 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9076 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9077 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9078 size_int (GET_MODE_BITSIZE
9083 emit_move_insn (new, op0);
9084 op0 = copy_rtx (new);
9085 PUT_MODE (op0, BLKmode);
9089 /* Get a reference to just this component. */
9090 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9092 set_mem_alias_set (op0, get_alias_set (exp));
9094 /* Adjust the alignment in case the bit position is not
9095 a multiple of the alignment of the inner object. */
9096 while (bitpos % alignment != 0)
9099 if (GET_CODE (XEXP (op0, 0)) == REG)
9100 mark_reg_pointer (XEXP (op0, 0), alignment);
9102 MEM_IN_STRUCT_P (op0) = 1;
9103 MEM_VOLATILE_P (op0) |= volatilep;
9105 *palign = alignment;
9114 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9117 /* Return the tree node if ARG corresponds to a string constant, or zero
9118 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9119 in bytes within the string that ARG is accessing. The type of the
9120 offset will be `sizetype'. */
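/* Usage sketch (illustrative): for an ARG of the form &"hello" + 2, as
   arises from the C expression &"hello"[2], we return the STRING_CST
   for "hello" and set *PTR_OFFSET to a sizetype 2; for anything not
   based on a string literal we return zero.  */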
9123 string_constant (arg, ptr_offset)
9129 if (TREE_CODE (arg) == ADDR_EXPR
9130 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9132 *ptr_offset = size_zero_node;
9133 return TREE_OPERAND (arg, 0);
9135 else if (TREE_CODE (arg) == PLUS_EXPR)
9137 tree arg0 = TREE_OPERAND (arg, 0);
9138 tree arg1 = TREE_OPERAND (arg, 1);
9143 if (TREE_CODE (arg0) == ADDR_EXPR
9144 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9146 *ptr_offset = convert (sizetype, arg1);
9147 return TREE_OPERAND (arg0, 0);
9149 else if (TREE_CODE (arg1) == ADDR_EXPR
9150 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9152 *ptr_offset = convert (sizetype, arg0);
9153 return TREE_OPERAND (arg1, 0);
9160 /* Expand code for a post- or pre- increment or decrement
9161 and return the RTX for the result.
9162 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9165 expand_increment (exp, post, ignore)
9171 tree incremented = TREE_OPERAND (exp, 0);
9172 optab this_optab = add_optab;
9174 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9175 int op0_is_copy = 0;
9176 int single_insn = 0;
9177 /* 1 means we can't store into OP0 directly,
9178 because it is a subreg narrower than a word,
9179 and we don't dare clobber the rest of the word. */
9182 /* Stabilize any component ref that might need to be
9183 evaluated more than once below. */
9185 || TREE_CODE (incremented) == BIT_FIELD_REF
9186 || (TREE_CODE (incremented) == COMPONENT_REF
9187 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9188 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9189 incremented = stabilize_reference (incremented);
9190 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9191 ones into save exprs so that they don't accidentally get evaluated
9192 more than once by the code below. */
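  /* For example (C++ only, illustrative): in "++(++i)" the inner
     preincrement becomes a SAVE_EXPR here, so the code below evaluates
     it exactly once even though INCREMENTED is used both to read the
     old value and to store the new one.  */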
9193 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9194 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9195 incremented = save_expr (incremented);
9197 /* Compute the operands as RTX.
9198 Note whether OP0 is the actual lvalue or a copy of it:
9199 I believe it is a copy iff it is a register or subreg
9200 and insns were generated in computing it. */
9202 temp = get_last_insn ();
9203 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
9205 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9206 in place but instead must do sign- or zero-extension during assignment,
9207 so we copy it into a new register and let the code below use it as a copy.
9210 Note that we can safely modify this SUBREG since it is known not to be
9211 shared (it was made by the expand_expr call above). */
9213 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9216 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9220 else if (GET_CODE (op0) == SUBREG
9221 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9223 /* We cannot increment this SUBREG in place. If we are
9224 post-incrementing, get a copy of the old value. Otherwise,
9225 just mark that we cannot increment in place. */
9227 op0 = copy_to_reg (op0);
9232 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9233 && temp != get_last_insn ());
9234 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9235 EXPAND_MEMORY_USE_BAD);
9237 /* Decide whether incrementing or decrementing. */
9238 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9239 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9240 this_optab = sub_optab;
9242 /* Convert decrement by a constant into a negative increment. */
9243 if (this_optab == sub_optab
9244 && GET_CODE (op1) == CONST_INT)
9246 op1 = GEN_INT (-INTVAL (op1));
9247 this_optab = add_optab;
9250 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9251 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9253 /* For a preincrement, see if we can do this with a single instruction. */
9256 icode = (int) this_optab->handlers[(int) mode].insn_code;
9257 if (icode != (int) CODE_FOR_nothing
9258 /* Make sure that OP0 is valid for operands 0 and 1
9259 of the insn we want to queue. */
9260 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9261 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9262 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9266 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9267 then we cannot just increment OP0. We must therefore contrive to
9268 increment the original value. Then, for postincrement, we can return
9269 OP0 since it is a copy of the old value. For preincrement, expand here
9270 unless we can do it with a single insn.
9272 Likewise if storing directly into OP0 would clobber high bits
9273 we need to preserve (bad_subreg). */
9274 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9276 /* This is the easiest way to increment the value wherever it is.
9277 Problems with multiple evaluation of INCREMENTED are prevented
9278 because either (1) it is a component_ref or preincrement,
9279 in which case it was stabilized above, or (2) it is an array_ref
9280 with constant index in an array in a register, which is
9281 safe to reevaluate. */
9282 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9283 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9284 ? MINUS_EXPR : PLUS_EXPR),
9287 TREE_OPERAND (exp, 1));
9289 while (TREE_CODE (incremented) == NOP_EXPR
9290 || TREE_CODE (incremented) == CONVERT_EXPR)
9292 newexp = convert (TREE_TYPE (incremented), newexp);
9293 incremented = TREE_OPERAND (incremented, 0);
9296 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9297 return post ? op0 : temp;
9302 /* We have a true reference to the value in OP0.
9303 If there is an insn to add or subtract in this mode, queue it.
9304 Queueing the increment insn avoids the register shuffling
9305 that often results if we must increment now and first save
9306 the old value for subsequent use. */
9308 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9309 op0 = stabilize (op0);
9312 icode = (int) this_optab->handlers[(int) mode].insn_code;
9313 if (icode != (int) CODE_FOR_nothing
9314 /* Make sure that OP0 is valid for operands 0 and 1
9315 of the insn we want to queue. */
9316 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9317 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9319 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9320 op1 = force_reg (mode, op1);
9322 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9324 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9326 rtx addr = (general_operand (XEXP (op0, 0), mode)
9327 ? force_reg (Pmode, XEXP (op0, 0))
9328 : copy_to_reg (XEXP (op0, 0)));
9331 op0 = replace_equiv_address (op0, addr);
9332 temp = force_reg (GET_MODE (op0), op0);
9333 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9334 op1 = force_reg (mode, op1);
9336 /* The increment queue is LIFO, thus we have to `queue'
9337 the instructions in reverse order. */
9338 enqueue_insn (op0, gen_move_insn (op0, temp));
9339 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9344 /* Preincrement, or we can't increment with one simple insn. */
9346 /* Save a copy of the value before inc or dec, to return it later. */
9347 temp = value = copy_to_reg (op0);
9349 /* Arrange to return the incremented value. */
9350 /* Copy the rtx because expand_binop will protect from the queue,
9351 and the results of that would be invalid for us to return
9352 if our caller does emit_queue before using our result. */
9353 temp = copy_rtx (value = op0);
9355 /* Increment however we can. */
9356 op1 = expand_binop (mode, this_optab, value, op1,
9357 current_function_check_memory_usage ? NULL_RTX : op0,
9358 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9359 /* Make sure the value is stored into OP0. */
9361 emit_move_insn (op0, op1);
9366 /* At the start of a function, record that we have no previously-pushed
9367 arguments waiting to be popped. */
9370 init_pending_stack_adjust ()
9372 pending_stack_adjust = 0;
9375 /* When exiting from function, if safe, clear out any pending stack adjust
9376 so the adjustment won't get done.
9378 Note, if the current function calls alloca, then it must have a
9379 frame pointer regardless of the value of flag_omit_frame_pointer. */
9382 clear_pending_stack_adjust ()
9384 #ifdef EXIT_IGNORE_STACK
9386 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9387 && EXIT_IGNORE_STACK
9388 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9389 && ! flag_inline_functions)
9391 stack_pointer_delta -= pending_stack_adjust,
9392 pending_stack_adjust = 0;
9397 /* Pop any previously-pushed arguments that have not been popped yet. */
9400 do_pending_stack_adjust ()
9402 if (inhibit_defer_pop == 0)
9404 if (pending_stack_adjust != 0)
9405 adjust_stack (GEN_INT (pending_stack_adjust));
9406 pending_stack_adjust = 0;
9410 /* Expand conditional expressions. */
9412 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9413 LABEL is an rtx of code CODE_LABEL, in this function and all the functions here. */
9417 jumpifnot (exp, label)
9421 do_jump (exp, label, NULL_RTX);
9424 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
9431 do_jump (exp, NULL_RTX, label);
9434 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9435 the result is zero, or IF_TRUE_LABEL if the result is one.
9436 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9437 meaning fall through in that case.
9439 do_jump always does any pending stack adjust except when it does not
9440 actually perform a jump. An example where there is no jump
9441 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9443 This function is responsible for optimizing cases such as
9444 &&, || and comparison operators in EXP. */
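/* A small example of that optimization (illustrative): for the C test
   "if (a && b)" we never compute a 0/1 value for the conjunction; the
   TRUTH_ANDIF_EXPR case below emits

       do_jump (a, if_false_label, NULL_RTX);
       do_jump (b, if_false_label, if_true_label);

   so a false A branches directly to the false label.  */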
9447 do_jump (exp, if_false_label, if_true_label)
9449 rtx if_false_label, if_true_label;
9451 enum tree_code code = TREE_CODE (exp);
9452 /* Some cases need to create a label to jump to
9453 in order to properly fall through.
9454 These cases set DROP_THROUGH_LABEL nonzero. */
9455 rtx drop_through_label = 0;
9459 enum machine_mode mode;
9461 #ifdef MAX_INTEGER_COMPUTATION_MODE
9462 check_max_integer_computation_mode (exp);
9473 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9479 /* This is not true with #pragma weak */
9481 /* The address of something can never be zero. */
9483 emit_jump (if_true_label);
9488 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9489 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9490 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9491 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9494 /* If we are narrowing the operand, we have to do the compare in the narrower mode. */
9496 if ((TYPE_PRECISION (TREE_TYPE (exp))
9497 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9499 case NON_LVALUE_EXPR:
9500 case REFERENCE_EXPR:
9505 /* These cannot change zero->non-zero or vice versa. */
9506 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9509 case WITH_RECORD_EXPR:
9510 /* Put the object on the placeholder list, recurse through our first
9511 operand, and pop the list. */
9512 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9514 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9515 placeholder_list = TREE_CHAIN (placeholder_list);
9519 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
9520 a test and can be longer if the test is eliminated. */
9522 /* Reduce to minus. */
9523 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9524 TREE_OPERAND (exp, 0),
9525 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9526 TREE_OPERAND (exp, 1))));
9527 /* Process as MINUS. */
9531 /* Non-zero iff operands of minus differ. */
9532 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9533 TREE_OPERAND (exp, 0),
9534 TREE_OPERAND (exp, 1)),
9535 NE, NE, if_false_label, if_true_label);
9539 /* If we are AND'ing with a small constant, do this comparison in the
9540 smallest type that fits. If the machine doesn't have comparisons
9541 that small, it will be converted back to the wider comparison.
9542 This helps if we are testing the sign bit of a narrower object.
9543 combine can't do this for us because it can't know whether a
9544 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
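      /* Example (illustrative): for "if (x & 0x80)" with a 32-bit X,
	 tree_floor_log2 gives I = 7, so MODE is QImode and the whole
	 test collapses to a QImode comparison of the low byte, provided
	 the target has a QImode compare insn.  */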
9546 if (! SLOW_BYTE_ACCESS
9547 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9548 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9549 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9550 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9551 && (type = type_for_mode (mode, 1)) != 0
9552 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9553 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9554 != CODE_FOR_nothing))
9556 do_jump (convert (type, exp), if_false_label, if_true_label);
9561 case TRUTH_NOT_EXPR:
9562 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9565 case TRUTH_ANDIF_EXPR:
9566 if (if_false_label == 0)
9567 if_false_label = drop_through_label = gen_label_rtx ();
9568 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9569 start_cleanup_deferral ();
9570 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9571 end_cleanup_deferral ();
9574 case TRUTH_ORIF_EXPR:
9575 if (if_true_label == 0)
9576 if_true_label = drop_through_label = gen_label_rtx ();
9577 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9578 start_cleanup_deferral ();
9579 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9580 end_cleanup_deferral ();
9585 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9586 preserve_temp_slots (NULL_RTX);
9590 do_pending_stack_adjust ();
9591 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9597 case ARRAY_RANGE_REF:
9599 HOST_WIDE_INT bitsize, bitpos;
9601 enum machine_mode mode;
9605 unsigned int alignment;
9607 /* Get description of this reference. We don't actually care
9608 about the underlying object here. */
9609 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9610 &unsignedp, &volatilep, &alignment);
9612 type = type_for_size (bitsize, unsignedp);
9613 if (! SLOW_BYTE_ACCESS
9614 && type != 0 && bitsize >= 0
9615 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9616 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9617 != CODE_FOR_nothing))
9619 do_jump (convert (type, exp), if_false_label, if_true_label);
9626 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9627 if (integer_onep (TREE_OPERAND (exp, 1))
9628 && integer_zerop (TREE_OPERAND (exp, 2)))
9629 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9631 else if (integer_zerop (TREE_OPERAND (exp, 1))
9632 && integer_onep (TREE_OPERAND (exp, 2)))
9633 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9637 rtx label1 = gen_label_rtx ();
9638 drop_through_label = gen_label_rtx ();
9640 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9642 start_cleanup_deferral ();
9643 /* Now the THEN-expression. */
9644 do_jump (TREE_OPERAND (exp, 1),
9645 if_false_label ? if_false_label : drop_through_label,
9646 if_true_label ? if_true_label : drop_through_label);
9647 /* In case the do_jump just above never jumps. */
9648 do_pending_stack_adjust ();
9649 emit_label (label1);
9651 /* Now the ELSE-expression. */
9652 do_jump (TREE_OPERAND (exp, 2),
9653 if_false_label ? if_false_label : drop_through_label,
9654 if_true_label ? if_true_label : drop_through_label);
9655 end_cleanup_deferral ();
9661 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9663 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9664 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9666 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9667 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9670 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9671 fold (build (EQ_EXPR, TREE_TYPE (exp),
9672 fold (build1 (REALPART_EXPR,
9673 TREE_TYPE (inner_type),
9675 fold (build1 (REALPART_EXPR,
9676 TREE_TYPE (inner_type),
9678 fold (build (EQ_EXPR, TREE_TYPE (exp),
9679 fold (build1 (IMAGPART_EXPR,
9680 TREE_TYPE (inner_type),
9682 fold (build1 (IMAGPART_EXPR,
9683 TREE_TYPE (inner_type),
9685 if_false_label, if_true_label);
9688 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9689 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9691 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9692 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9693 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9695 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9701 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9703 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9704 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9706 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9707 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9710 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9711 fold (build (NE_EXPR, TREE_TYPE (exp),
9712 fold (build1 (REALPART_EXPR,
9713 TREE_TYPE (inner_type),
9715 fold (build1 (REALPART_EXPR,
9716 TREE_TYPE (inner_type),
9718 fold (build (NE_EXPR, TREE_TYPE (exp),
9719 fold (build1 (IMAGPART_EXPR,
9720 TREE_TYPE (inner_type),
9722 fold (build1 (IMAGPART_EXPR,
9723 TREE_TYPE (inner_type),
9725 if_false_label, if_true_label);
9728 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9729 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9731 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9732 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9733 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9735 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9740 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9741 if (GET_MODE_CLASS (mode) == MODE_INT
9742 && ! can_compare_p (LT, mode, ccp_jump))
9743 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9745 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9749 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9750 if (GET_MODE_CLASS (mode) == MODE_INT
9751 && ! can_compare_p (LE, mode, ccp_jump))
9752 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9754 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9758 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9759 if (GET_MODE_CLASS (mode) == MODE_INT
9760 && ! can_compare_p (GT, mode, ccp_jump))
9761 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9763 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9767 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9768 if (GET_MODE_CLASS (mode) == MODE_INT
9769 && ! can_compare_p (GE, mode, ccp_jump))
9770 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9772 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9775 case UNORDERED_EXPR:
9778 enum rtx_code cmp, rcmp;
9781 if (code == UNORDERED_EXPR)
9782 cmp = UNORDERED, rcmp = ORDERED;
9784 cmp = ORDERED, rcmp = UNORDERED;
9785 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9788 if (! can_compare_p (cmp, mode, ccp_jump)
9789 && (can_compare_p (rcmp, mode, ccp_jump)
9790 /* If the target doesn't provide either UNORDERED or ORDERED
9791 comparisons, canonicalize on UNORDERED for the library. */
9792 || rcmp == UNORDERED))
9796 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
9798 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
9803 enum rtx_code rcode1;
9804 enum tree_code tcode2;
9828 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9829 if (can_compare_p (rcode1, mode, ccp_jump))
9830 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
9834 tree op0 = save_expr (TREE_OPERAND (exp, 0));
9835 tree op1 = save_expr (TREE_OPERAND (exp, 1));
9838 /* If the target doesn't support combined unordered
9839 compares, decompose into UNORDERED + comparison. */
9840 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
9841 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
9842 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
9843 do_jump (exp, if_false_label, if_true_label);
9849 __builtin_expect (<test>, 0) and
9850 __builtin_expect (<test>, 1)
9852 We need to do this here, so that <test> is not converted to a SCC
9853 operation on machines that use condition code registers and COMPARE
9854 like the PowerPC, and then the jump is done based on whether the SCC
9855 operation produced a 1 or 0. */
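      /* E.g. (illustrative): given "if (__builtin_expect (p != 0, 1))",
	 handling the call here keeps P != 0 as the branch condition; if
	 it were expanded normally first, such machines would materialize
	 a 0/1 SCC result and the jump would merely test that value.  */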
9857 /* Check for a built-in function. */
9858 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
9860 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
9861 tree arglist = TREE_OPERAND (exp, 1);
9863 if (TREE_CODE (fndecl) == FUNCTION_DECL
9864 && DECL_BUILT_IN (fndecl)
9865 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
9866 && arglist != NULL_TREE
9867 && TREE_CHAIN (arglist) != NULL_TREE)
9869 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
9872 if (seq != NULL_RTX)
9879 /* fall through and generate the normal code. */
9883 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
9885 /* This is not needed any more and causes poor code since it causes
9886 comparisons and tests from non-SI objects to have different code sequences. */
9888 /* Copy to register to avoid generating bad insns by cse
9889 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
9890 if (!cse_not_expected && GET_CODE (temp) == MEM)
9891 temp = copy_to_reg (temp);
9893 do_pending_stack_adjust ();
9894 /* Do any postincrements in the expression that was tested. */
9897 if (GET_CODE (temp) == CONST_INT
9898 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
9899 || GET_CODE (temp) == LABEL_REF)
9901 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
9905 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
9906 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
9907 /* Note swapping the labels gives us not-equal. */
9908 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
9909 else if (GET_MODE (temp) != VOIDmode)
9910 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
9911 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
9912 GET_MODE (temp), NULL_RTX, 0,
9913 if_false_label, if_true_label);
9918 if (drop_through_label)
9920 /* If do_jump produces code that might be jumped around,
9921 do any stack adjusts from that code, before the place
9922 where control merges in. */
9923 do_pending_stack_adjust ();
9924 emit_label (drop_through_label);
9928 /* Given a comparison expression EXP for values too wide to be compared
9929 with one insn, test the comparison and jump to the appropriate label.
9930 The code of EXP is ignored; we always test GT if SWAP is 0,
9931 and LT if SWAP is 1. */
9934 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9937 rtx if_false_label, if_true_label;
9939 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9940 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9941 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9942 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
9944 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
9947 /* Compare OP0 with OP1, word at a time, in mode MODE.
9948 UNSIGNEDP says to do unsigned comparison.
9949 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
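/* Sketch of the expansion (illustrative): for a DImode comparison on a
   32-bit target, NWORDS is 2 and the loop below emits, for each word
   from high-order to low-order:

       word0 > word1   -> if_true_label   (signed only for the high word)
       word0 != word1  -> if_false_label

   Falling out of the loop means the operands are equal, so we then
   jump to if_false_label.  */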
9952 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9953 enum machine_mode mode;
9956 rtx if_false_label, if_true_label;
9958 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9959 rtx drop_through_label = 0;
9962 if (! if_true_label || ! if_false_label)
9963 drop_through_label = gen_label_rtx ();
9964 if (! if_true_label)
9965 if_true_label = drop_through_label;
9966 if (! if_false_label)
9967 if_false_label = drop_through_label;
9969 /* Compare a word at a time, high order first. */
9970 for (i = 0; i < nwords; i++)
9972 rtx op0_word, op1_word;
9974 if (WORDS_BIG_ENDIAN)
9976 op0_word = operand_subword_force (op0, i, mode);
9977 op1_word = operand_subword_force (op1, i, mode);
9981 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9982 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9985 /* All but the high-order word must be compared as unsigned. */
9986 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9987 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9988 NULL_RTX, if_true_label);
9990 /* Consider lower words only if these are equal. */
9991 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9992 NULL_RTX, 0, NULL_RTX, if_false_label);
9996 emit_jump (if_false_label);
9997 if (drop_through_label)
9998 emit_label (drop_through_label);
10001 /* Given an EQ_EXPR expression EXP for values too wide to be compared
10002 with one insn, test the comparison and jump to the appropriate label. */
10005 do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10007 rtx if_false_label, if_true_label;
10009 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10010 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10011 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10012 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10014 rtx drop_through_label = 0;
10016 if (! if_false_label)
10017 drop_through_label = if_false_label = gen_label_rtx ();
10019 for (i = 0; i < nwords; i++)
10020 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
10021 operand_subword_force (op1, i, mode),
10022 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10023 word_mode, NULL_RTX, 0, if_false_label,
10027 emit_jump (if_true_label);
10028 if (drop_through_label)
10029 emit_label (drop_through_label);
10032 /* Jump according to whether OP0 is 0.
10033 We assume that OP0 has an integer mode that is too wide
10034 for the available compare insns. */
10037 do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10039 rtx if_false_label, if_true_label;
10041 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10044 rtx drop_through_label = 0;
10046 /* The fastest way of doing this comparison on almost any machine is to
10047 "or" all the words and compare the result. If all have to be loaded
10048 from memory and this is a very wide item, it's possible this may
10049 be slower, but that's highly unlikely. */
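  /* E.g. (illustrative): to test a DImode OP0 on a 32-bit target we
     compute PART = lo | hi below and compare PART against zero once,
     rather than branching on each word separately.  */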
10051 part = gen_reg_rtx (word_mode);
10052 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10053 for (i = 1; i < nwords && part != 0; i++)
10054 part = expand_binop (word_mode, ior_optab, part,
10055 operand_subword_force (op0, i, GET_MODE (op0)),
10056 part, 1, OPTAB_WIDEN);
10060 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
10061 NULL_RTX, 0, if_false_label, if_true_label);
10066 /* If we couldn't do the "or" simply, do this with a series of compares. */
10067 if (! if_false_label)
10068 drop_through_label = if_false_label = gen_label_rtx ();
10070 for (i = 0; i < nwords; i++)
10071 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
10072 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
10073 if_false_label, NULL_RTX);
10076 emit_jump (if_true_label);
10078 if (drop_through_label)
10079 emit_label (drop_through_label);
10082 /* Generate code for a comparison of OP0 and OP1 with rtx code CODE
10083 (including code to compute the values to be compared),
10084 and set (CC0) according to the result.
10085 The decision as to signed or unsigned comparison must be made by the caller.
10087 We force a stack adjustment unless there are currently
10088 things pushed on the stack that aren't yet used.
10090 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being compared.
10093 If ALIGN is non-zero, it is the alignment of this type; if zero, the
10094 size of MODE should be used. */
10097 compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
10099 enum rtx_code code;
10101 enum machine_mode mode;
10103 unsigned int align;
10107 /* If one operand is constant, make it the second one. Only do this
10108 if the other operand is not constant as well. */
10110 if (swap_commutative_operands_p (op0, op1))
10115 code = swap_condition (code);
10118 if (flag_force_mem)
10120 op0 = force_not_mem (op0);
10121 op1 = force_not_mem (op1);
10124 do_pending_stack_adjust ();
10126 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
10127 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
10131 /* There's no need to do this now that combine.c can eliminate lots of
10132 sign extensions. This can be less efficient in certain cases on other
10135 /* If this is a signed equality comparison, we can do it as an
10136 unsigned comparison since zero-extension is cheaper than sign
10137 extension and comparisons with zero are done as unsigned. This is
10138 the case even on machines that can do fast sign extension, since
10139 zero-extension is easier to combine with other operations than
10140 sign-extension is. If we are comparing against a constant, we must
10141 convert it to what it would look like unsigned. */
10142 if ((code == EQ || code == NE) && ! unsignedp
10143 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
10145 if (GET_CODE (op1) == CONST_INT
10146 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
10147 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
10152 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
10154 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
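/* Note for callers: unless the comparison folds to a constant, the value
   returned above has the shape (CODE (cc0) (const_int 0)), e.g.
   (gt (cc0) (const_int 0)), which tests the condition codes set by the
   compare insn just emitted.  do_store_flag below relies on the folded
   case when it checks the returned value for CONST_INT.  */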
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
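/* Illustration of the reversal above: a hypothetical request to branch
   only when the comparison is false, such as

	do_compare_rtx_and_jump (x, y, LT, 0, SImode, NULL_RTX, 0,
				 label, NULL_RTX);

   is rewritten as "jump to LABEL when x >= y".  Floating-point modes are
   excluded because reverse_condition is unsafe there: with IEEE NaNs,
   ! (x < y) does not imply x >= y.  */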
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }
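  /* Examples of the conversions performed above (signed X):

	X <  1    becomes   X <= 0
	X <= -1   becomes   X <  0
	X >  -1   becomes   X >= 0
	X >= 1    becomes   X >  0

     so the single-bit and sign tests below only ever have to recognize
     comparisons against zero.  */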
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }
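  /* Worked example (hypothetical source expression): for "(x & 8) != 0"
     INNER is x and BITNUM is 3, so the block above produces

	(x >> 3) & 1

     and for the EQ form "(x & 8) == 0" it appends the XOR:

	((x >> 3) & 1) ^ 1

     When the tested bit is the sign bit (BITNUM == TYPE_PRECISION - 1),
     the unsigned shift alone isolates the bit and the AND is omitted.  */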
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
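/* The set/jump/set fallback above emits, for a non-inverted comparison,

	target = 1;
	compare op0, op1
	branch-if-CODE  label
	target = 0;
     label:

   i.e. TARGET is preloaded with the "true" value and the store of the
   "false" value is branched around when the condition holds.  With
   INVERT set, the two constants are exchanged.  */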
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, 0, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (type_for_size (index_bits, 0),
				index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
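/* For reference, the casesi pattern's five operands are the index, the
   lower bound, the range (upper bound minus lower bound), the table
   label and the default label; a successful expansion behaves roughly
   like

	if ((unsigned) (index - minval) > range) goto default_label;
	goto *table_label[index - minval];

   with each operand coerced above to the mode and predicate the insn
   requires.  */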
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif
/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
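  /* Concrete instance (hypothetical bounds): for case values 5 .. 10,
     RANGE is 5 and INDEX holds the original value minus 5.  An original
     value of 3 yields (unsigned) -2, which exceeds 5, so the single
     unsigned GTU test below also rejects values under the lower bound.  */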
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
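/* Illustration: with 4-byte table entries (GET_MODE_SIZE (CASE_VECTOR_MODE)
   == 4), the address formed above is

	table_label + index * 4

   The table entry is loaded from that address and the tablejump transfers
   to it, after PIC_CASE_VECTOR_ADDRESS has rewritten the address when
   generating PIC code.  */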
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}